tesseract v5.3.3.20231005
tesseract::Parallel Class Reference

#include <parallel.h>

Inheritance diagram for tesseract::Parallel:
tesseract::Parallel → tesseract::Plumbing → tesseract::Network

Public Member Functions

TESS_API Parallel (const char *name, NetworkType type)
 
StaticShape OutputShape (const StaticShape &input_shape) const override
 
std::string spec () const override
 
void Forward (bool debug, const NetworkIO &input, const TransposedArray *input_transpose, NetworkScratch *scratch, NetworkIO *output) override
 
bool Backward (bool debug, const NetworkIO &fwd_deltas, NetworkScratch *scratch, NetworkIO *back_deltas) override
 
- Public Member Functions inherited from tesseract::Plumbing
 Plumbing (const std::string &name)
 
 ~Plumbing () override
 
StaticShape InputShape () const override
 
std::string spec () const override
 
bool IsPlumbingType () const override
 
void SetEnableTraining (TrainingState state) override
 
void SetNetworkFlags (uint32_t flags) override
 
int InitWeights (float range, TRand *randomizer) override
 
int RemapOutputs (int old_no, const std::vector< int > &code_map) override
 
void ConvertToInt () override
 
void SetRandomizer (TRand *randomizer) override
 
virtual void AddToStack (Network *network)
 
bool SetupNeedsBackprop (bool needs_backprop) override
 
int XScaleFactor () const override
 
void CacheXScaleFactor (int factor) override
 
void DebugWeights () override
 
const std::vector< Network * > & stack () const
 
void EnumerateLayers (const std::string *prefix, std::vector< std::string > &layers) const
 
Network * GetLayer (const char *id) const
 
float LayerLearningRate (const char *id)
 
void ScaleLayerLearningRate (const char *id, double factor)
 
void SetLayerLearningRate (const char *id, float learning_rate)
 
float * LayerLearningRatePtr (const char *id)
 
bool Serialize (TFile *fp) const override
 
bool DeSerialize (TFile *fp) override
 
void Update (float learning_rate, float momentum, float adam_beta, int num_samples) override
 
void CountAlternators (const Network &other, TFloat *same, TFloat *changed) const override
 
- Public Member Functions inherited from tesseract::Network
 Network ()
 
 Network (NetworkType type, const std::string &name, int ni, int no)
 
virtual ~Network ()=default
 
NetworkType type () const
 
bool IsTraining () const
 
bool needs_to_backprop () const
 
int num_weights () const
 
int NumInputs () const
 
int NumOutputs () const
 
virtual StaticShape InputShape () const
 
virtual StaticShape OutputShape (const StaticShape &input_shape) const
 
const std::string & name () const
 
virtual std::string spec () const
 
bool TestFlag (NetworkFlags flag) const
 
virtual bool IsPlumbingType () const
 
virtual void SetEnableTraining (TrainingState state)
 
virtual void SetNetworkFlags (uint32_t flags)
 
virtual int InitWeights (float range, TRand *randomizer)
 
virtual int RemapOutputs (int old_no, const std::vector< int > &code_map)
 
virtual void ConvertToInt ()
 
virtual void SetRandomizer (TRand *randomizer)
 
virtual bool SetupNeedsBackprop (bool needs_backprop)
 
virtual int XScaleFactor () const
 
virtual void CacheXScaleFactor (int factor)
 
virtual void DebugWeights ()=0
 
virtual bool Serialize (TFile *fp) const
 
virtual bool DeSerialize (TFile *fp)=0
 
virtual void Update (float learning_rate, float momentum, float adam_beta, int num_samples)
 
virtual void CountAlternators (const Network &other, TFloat *same, TFloat *changed) const
 
virtual void Forward (bool debug, const NetworkIO &input, const TransposedArray *input_transpose, NetworkScratch *scratch, NetworkIO *output)=0
 
virtual bool Backward (bool debug, const NetworkIO &fwd_deltas, NetworkScratch *scratch, NetworkIO *back_deltas)=0
 
void DisplayForward (const NetworkIO &matrix)
 
void DisplayBackward (const NetworkIO &matrix)
 

Additional Inherited Members

- Static Public Member Functions inherited from tesseract::Network
static Network * CreateFromFile (TFile *fp)
 
static void ClearWindow (bool tess_coords, const char *window_name, int width, int height, ScrollView **window)
 
static int DisplayImage (Image pix, ScrollView *window)
 
- Protected Member Functions inherited from tesseract::Network
TFloat Random (TFloat range)
 
- Protected Attributes inherited from tesseract::Plumbing
std::vector< Network * > stack_
 
std::vector< float > learning_rates_
 
- Protected Attributes inherited from tesseract::Network
NetworkType type_
 
TrainingState training_
 
bool needs_to_backprop_
 
int32_t network_flags_
 
int32_t ni_
 
int32_t no_
 
int32_t num_weights_
 
std::string name_
 
ScrollView * forward_win_
 
ScrollView * backward_win_
 
TRand * randomizer_
 

Detailed Description

Definition at line 26 of file parallel.h.

Constructor & Destructor Documentation

◆ Parallel()

tesseract::Parallel::Parallel ( const char *  name,
NetworkType  type 
)

Definition at line 34 of file parallel.cpp.

34 : Plumbing(name) {
35 type_ = type;
36}
NetworkType type_
Definition: network.h:300
const std::string & name() const
Definition: network.h:140
NetworkType type() const
Definition: network.h:110
Plumbing(const std::string &name)
Definition: plumbing.cpp:24

Member Function Documentation

◆ Backward()

bool tesseract::Parallel::Backward ( bool  debug,
const NetworkIO fwd_deltas,
NetworkScratch scratch,
NetworkIO back_deltas 
)
overridevirtual

Implements tesseract::Network.

Definition at line 113 of file parallel.cpp.

114 {
115 // If this parallel is a replicator of convolvers, or holds a 1-d LSTM pair,
116 // or a 2-d LSTM quad, do debug locally, and don't pass the flag on.
117 if (debug && type_ != NT_PARALLEL) {
118#ifndef GRAPHICS_DISABLED
119 DisplayBackward(fwd_deltas);
120#endif
121 debug = false;
122 }
123 auto stack_size = stack_.size();
124 if (type_ == NT_PAR_2D_LSTM) {
125 // Special case, run parallel in parallel.
126 std::vector<NetworkScratch::IO> in_deltas(stack_size);
127 std::vector<NetworkScratch::IO> out_deltas(stack_size);
128 // Split the forward deltas for each stack element.
129 int feature_offset = 0;
130 for (unsigned i = 0; i < stack_.size(); ++i) {
131 int num_features = stack_[i]->NumOutputs();
132 in_deltas[i].Resize(fwd_deltas, num_features, scratch);
133 out_deltas[i].Resize(fwd_deltas, stack_[i]->NumInputs(), scratch);
134 in_deltas[i]->CopyUnpacking(fwd_deltas, feature_offset, num_features);
135 feature_offset += num_features;
136 }
137#ifdef _OPENMP
138# pragma omp parallel for num_threads(stack_size)
139#endif
140 for (unsigned i = 0; i < stack_size; ++i) {
141 stack_[i]->Backward(debug, *in_deltas[i], scratch, i == 0 ? back_deltas : out_deltas[i]);
142 }
143 if (needs_to_backprop_) {
144 for (unsigned i = 1; i < stack_size; ++i) {
145 back_deltas->AddAllToFloat(*out_deltas[i]);
146 }
147 }
148 } else {
149 // Revolving partial deltas.
150 NetworkScratch::IO in_deltas(fwd_deltas, scratch);
151 // The sum of deltas from different sources, which will eventually go into
152 // back_deltas.
153 NetworkScratch::IO out_deltas;
154 int feature_offset = 0;
155 for (unsigned i = 0; i < stack_.size(); ++i) {
156 int num_features = stack_[i]->NumOutputs();
157 in_deltas->CopyUnpacking(fwd_deltas, feature_offset, num_features);
158 feature_offset += num_features;
159 if (stack_[i]->Backward(debug, *in_deltas, scratch, back_deltas)) {
160 if (i == 0) {
161 out_deltas.ResizeFloat(*back_deltas, back_deltas->NumFeatures(), scratch);
162 out_deltas->CopyAll(*back_deltas);
163 } else if (back_deltas->NumFeatures() == out_deltas->NumFeatures()) {
164 // Widths are allowed to be different going back, as we may have
165 // input nets, so only accumulate the deltas if the widths are the
166 // same.
167 out_deltas->AddAllToFloat(*back_deltas);
168 }
169 }
170 }
171 if (needs_to_backprop_) {
172 back_deltas->CopyAll(*out_deltas);
173 }
174 }
175 if (needs_to_backprop_) {
176 back_deltas->ScaleFloatBy(1.0f / stack_size);
177 }
178 return needs_to_backprop_;
179}
@ NT_PARALLEL
Definition: network.h:47
@ NT_PAR_2D_LSTM
Definition: network.h:51
bool needs_to_backprop_
Definition: network.h:302
void DisplayBackward(const NetworkIO &matrix)
Definition: network.cpp:341
int NumInputs() const
Definition: network.h:122
bool Backward(bool debug, const NetworkIO &fwd_deltas, NetworkScratch *scratch, NetworkIO *back_deltas) override
Definition: parallel.cpp:113
std::vector< Network * > stack_
Definition: plumbing.h:147

◆ Forward()

void tesseract::Parallel::Forward ( bool  debug,
const NetworkIO input,
const TransposedArray input_transpose,
NetworkScratch scratch,
NetworkIO output 
)
overridevirtual

Implements tesseract::Network.

Definition at line 52 of file parallel.cpp.

53 {
54 bool parallel_debug = false;
55 // If this parallel is a replicator of convolvers, or holds a 1-d LSTM pair,
56 // or a 2-d LSTM quad, do debug locally, and don't pass the flag on.
57 if (debug && type_ != NT_PARALLEL) {
58 parallel_debug = true;
59 debug = false;
60 }
61 int stack_size = stack_.size();
62 if (type_ == NT_PAR_2D_LSTM) {
63 // Special case, run parallel in parallel.
64 std::vector<NetworkScratch::IO> results(stack_size);
65 for (int i = 0; i < stack_size; ++i) {
66 results[i].Resize(input, stack_[i]->NumOutputs(), scratch);
67 }
68#ifdef _OPENMP
69# pragma omp parallel for num_threads(stack_size)
70#endif
71 for (int i = 0; i < stack_size; ++i) {
72 stack_[i]->Forward(debug, input, nullptr, scratch, results[i]);
73 }
74 // Now pack all the results (serially) into the output.
75 int out_offset = 0;
76 output->Resize(*results[0], NumOutputs());
77 for (int i = 0; i < stack_size; ++i) {
78 out_offset = output->CopyPacking(*results[i], out_offset);
79 }
80 } else {
81 // Revolving intermediate result.
82 NetworkScratch::IO result(input, scratch);
83 // Source for divided replicated.
84 NetworkScratch::IO source_part;
85 TransposedArray *src_transpose = nullptr;
86 if (IsTraining() && type_ == NT_REPLICATED) {
87 // Make a transposed copy of the input.
88 input.Transpose(&transposed_input_);
89 src_transpose = &transposed_input_;
90 }
91 // Run each network, putting the outputs into result.
92 int out_offset = 0;
93 for (int i = 0; i < stack_size; ++i) {
94 stack_[i]->Forward(debug, input, src_transpose, scratch, result);
95 // All networks must have the same output width
96 if (i == 0) {
97 output->Resize(*result, NumOutputs());
98 } else {
99 ASSERT_HOST(result->Width() == output->Width());
100 }
101 out_offset = output->CopyPacking(*result, out_offset);
102 }
103 }
104#ifndef GRAPHICS_DISABLED
105 if (parallel_debug) {
107 }
108#endif
109}
#define ASSERT_HOST(x)
Definition: errcode.h:54
@ NT_REPLICATED
Definition: network.h:48
int NumOutputs() const
Definition: network.h:125
void DisplayForward(const NetworkIO &matrix)
Definition: network.cpp:333
bool IsTraining() const
Definition: network.h:113

◆ OutputShape()

StaticShape tesseract::Parallel::OutputShape ( const StaticShape input_shape) const
overridevirtual

Reimplemented from tesseract::Network.

Definition at line 40 of file parallel.cpp.

40 {
41 StaticShape result = stack_[0]->OutputShape(input_shape);
42 int stack_size = stack_.size();
43 for (int i = 1; i < stack_size; ++i) {
44 StaticShape shape = stack_[i]->OutputShape(input_shape);
45 result.set_depth(result.depth() + shape.depth());
46 }
47 return result;
48}

◆ spec()

std::string tesseract::Parallel::spec ( ) const
inlineoverridevirtual

Reimplemented from tesseract::Network.

Definition at line 36 of file parallel.h.

36 {
37 std::string spec;
38 if (type_ == NT_PAR_2D_LSTM) {
39 // We have 4 LSTMs operating in parallel here, so the size of each is
40 // the number of outputs/4.
41 spec += "L2xy" + std::to_string(no_ / 4);
42 } else if (type_ == NT_PAR_RL_LSTM) {
43 // We have 2 LSTMs operating in parallel here, so the size of each is
44 // the number of outputs/2.
45 if (stack_[0]->type() == NT_LSTM_SUMMARY) {
46 spec += "Lbxs" + std::to_string(no_ / 2);
47 } else {
48 spec += "Lbx" + std::to_string(no_ / 2);
49 }
50 } else {
51 if (type_ == NT_REPLICATED) {
52 spec += "R" + std::to_string(stack_.size()) + "(" + stack_[0]->spec();
53 } else {
54 for (auto &it : stack_) {
55 spec += it->spec();
56 }
57 }
58 spec += ")";
59 }
60 return spec;
61 }
@ NT_LSTM_SUMMARY
Definition: network.h:59
@ NT_PAR_RL_LSTM
Definition: network.h:49
std::string spec() const override
Definition: parallel.h:36

The documentation for this class was generated from the following files: