tesseract  4.00.00dev
series.cpp
// File:        series.cpp
// Description: Runs networks in series on the same input.
// Author:      Ray Smith
// Created:     Thu May 02 08:26:06 PST 2013
//
// (C) Copyright 2013, Google Inc.
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "series.h"

#include "fullyconnected.h"
#include "networkscratch.h"
#include "scrollview.h"
#include "tprintf.h"

namespace tesseract {

// ni_ and no_ will be set by AddToStack.
Series::Series(const STRING& name) : Plumbing(name) {
  type_ = NT_SERIES;
}

Series::~Series() {
}

// Returns the shape output from the network given an input shape (which may
// be partially unknown, i.e. zero).
StaticShape Series::OutputShape(const StaticShape& input_shape) const {
  StaticShape result(input_shape);
  int stack_size = stack_.size();
  for (int i = 0; i < stack_size; ++i) {
    result = stack_[i]->OutputShape(result);
  }
  return result;
}
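
// Illustrative sketch (hypothetical layers): with stack_ = {conv, lstm},
//   series->OutputShape(in)
// is equivalent to
//   lstm->OutputShape(conv->OutputShape(in))
// so the series' output shape is the composition of its layers' shapes,
// applied in stack order.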

// Sets up the network for training. Initializes weights using weights of
// scale `range` picked according to the random number generator `randomizer`.
// Note that series has its own implementation just for debug purposes.
int Series::InitWeights(float range, TRand* randomizer) {
  num_weights_ = 0;
  tprintf("Num outputs,weights in Series:\n");
  for (int i = 0; i < stack_.size(); ++i) {
    int weights = stack_[i]->InitWeights(range, randomizer);
    tprintf(" %s:%d, %d\n",
            stack_[i]->spec().string(), stack_[i]->NumOutputs(), weights);
    num_weights_ += weights;
  }
  tprintf("Total weights = %d\n", num_weights_);
  return num_weights_;
}
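
// Illustrative usage sketch (hypothetical setup): once the layers have been
// added via AddToStack, one call initializes every layer in the stack and
// prints the per-layer spec, output count and weight count, then the total.
//   TRand randomizer;
//   randomizer.set_seed(0x12345678);  // arbitrary seed chosen for the sketch
//   int total_weights = series->InitWeights(0.1f, &randomizer);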

// Recursively searches the network for softmaxes with old_no outputs,
// and remaps their outputs according to code_map. See network.h for details.
int Series::RemapOutputs(int old_no, const std::vector<int>& code_map) {
  num_weights_ = 0;
  tprintf("Num (Extended) outputs,weights in Series:\n");
  for (int i = 0; i < stack_.size(); ++i) {
    int weights = stack_[i]->RemapOutputs(old_no, code_map);
    tprintf(" %s:%d, %d\n", stack_[i]->spec().string(),
            stack_[i]->NumOutputs(), weights);
    num_weights_ += weights;
  }
  tprintf("Total weights = %d\n", num_weights_);
  no_ = stack_.back()->NumOutputs();
  return num_weights_;
}

// Sets needs_to_backprop_ to needs_backprop and returns true if
// needs_backprop || any weights in this network so the next layer forward
// can be told to produce backprop for this layer if needed.
bool Series::SetupNeedsBackprop(bool needs_backprop) {
  needs_to_backprop_ = needs_backprop;
  for (int i = 0; i < stack_.size(); ++i)
    needs_backprop = stack_[i]->SetupNeedsBackprop(needs_backprop);
  return needs_backprop;
}

// Returns an integer reduction factor that the network applies to the
// time sequence. Assumes that any 2-d is already eliminated. Used for
// scaling bounding boxes of truth data.
// WARNING: if GlobalMinimax is used to vary the scale, this will return
// the last used scale factor. Call it before any forward, and it will return
// the minimum scale factor of the paths through the GlobalMinimax.
int Series::XScaleFactor() const {
  int factor = 1;
  for (int i = 0; i < stack_.size(); ++i)
    factor *= stack_[i]->XScaleFactor();
  return factor;
}
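
// Worked example: if the stack holds three layers whose XScaleFactor() values
// are 1, 2 and 4 (say an LSTM plus two x-reducing layers, hypothetical here),
// the series factor is 1 * 2 * 4 = 8, i.e. one output timestep per 8 input
// columns, which is the factor used to rescale truth bounding boxes.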

// Provides the (minimum) x scale factor to the network (of interest only to
// input units) so they can determine how to scale bounding boxes.
void Series::CacheXScaleFactor(int factor) {
  stack_[0]->CacheXScaleFactor(factor);
}

// Runs forward propagation of activations on the input line.
// See NetworkCpp for a detailed discussion of the arguments.
void Series::Forward(bool debug, const NetworkIO& input,
                     const TransposedArray* input_transpose,
                     NetworkScratch* scratch, NetworkIO* output) {
  int stack_size = stack_.size();
  ASSERT_HOST(stack_size > 1);
  // Revolving intermediate buffers.
  NetworkScratch::IO buffer1(input, scratch);
  NetworkScratch::IO buffer2(input, scratch);
  // Run each network in turn, giving the output of n as the input to n + 1,
  // with the final network providing the real output.
  stack_[0]->Forward(debug, input, input_transpose, scratch, buffer1);
  for (int i = 1; i < stack_size; i += 2) {
    stack_[i]->Forward(debug, *buffer1, NULL, scratch,
                       i + 1 < stack_size ? buffer2 : output);
    if (i + 1 == stack_size) return;
    stack_[i + 1]->Forward(debug, *buffer2, NULL, scratch,
                           i + 2 < stack_size ? buffer1 : output);
  }
}
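
// Illustrative trace of the revolving buffers for a hypothetical four-layer
// stack (indices 0..3):
//   stack_[0]: input   -> buffer1
//   stack_[1]: buffer1 -> buffer2
//   stack_[2]: buffer2 -> buffer1
//   stack_[3]: buffer1 -> output
// Only two scratch buffers are needed no matter how deep the stack is.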

// Runs backward propagation of errors on the deltas line.
// See NetworkCpp for a detailed discussion of the arguments.
bool Series::Backward(bool debug, const NetworkIO& fwd_deltas,
                      NetworkScratch* scratch,
                      NetworkIO* back_deltas) {
  if (!IsTraining()) return false;
  int stack_size = stack_.size();
  ASSERT_HOST(stack_size > 1);
  // Revolving intermediate buffers.
  NetworkScratch::IO buffer1(fwd_deltas, scratch);
  NetworkScratch::IO buffer2(fwd_deltas, scratch);
  // Run each network in reverse order, giving the back_deltas output of n as
  // the fwd_deltas input to n-1, with the 0 network providing the real output.
  if (!stack_.back()->IsTraining() ||
      !stack_.back()->Backward(debug, fwd_deltas, scratch, buffer1))
    return false;
  for (int i = stack_size - 2; i >= 0; i -= 2) {
    if (!stack_[i]->IsTraining() ||
        !stack_[i]->Backward(debug, *buffer1, scratch,
                             i > 0 ? buffer2 : back_deltas))
      return false;
    if (i == 0) return needs_to_backprop_;
    if (!stack_[i - 1]->IsTraining() ||
        !stack_[i - 1]->Backward(debug, *buffer2, scratch,
                                 i > 1 ? buffer1 : back_deltas))
      return false;
  }
  return needs_to_backprop_;
}
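
// Illustrative trace for the same hypothetical four-layer stack, now in
// reverse:
//   stack_[3]: fwd_deltas -> buffer1
//   stack_[2]: buffer1    -> buffer2
//   stack_[1]: buffer2    -> buffer1
//   stack_[0]: buffer1    -> back_deltas
// A layer that is not training, or whose Backward() produces no deltas,
// short-circuits the chain and makes this Backward() return false.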

// Splits the series after the given index, returning the two parts and
// deleting itself. The first part, up to the network with index last_start,
// goes into start, and the rest goes into end.
void Series::SplitAt(int last_start, Series** start, Series** end) {
  *start = NULL;
  *end = NULL;
  if (last_start < 0 || last_start >= stack_.size()) {
    tprintf("Invalid split index %d must be in range [0,%d]!\n",
            last_start, stack_.size() - 1);
    return;
  }
  Series* master_series = new Series("MasterSeries");
  Series* boosted_series = new Series("BoostedSeries");
  for (int s = 0; s <= last_start; ++s) {
    if (s + 1 == stack_.size() && stack_[s]->type() == NT_SOFTMAX) {
      // Change the softmax to a tanh.
      FullyConnected* fc = static_cast<FullyConnected*>(stack_[s]);
      fc->ChangeType(NT_TANH);
    }
    master_series->AddToStack(stack_[s]);
    stack_[s] = NULL;
  }
  for (int s = last_start + 1; s < stack_.size(); ++s) {
    boosted_series->AddToStack(stack_[s]);
    stack_[s] = NULL;
  }
  *start = master_series;
  *end = boosted_series;
  delete this;
}
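
// Illustrative usage sketch (hypothetical five-layer series): splitting after
// index 2 moves layers 0..2 into *start and layers 3..4 into *end, and the
// original object deletes itself, so it must not be used afterwards.
//   Series* start = NULL;
//   Series* end = NULL;
//   series->SplitAt(2, &start, &end);  // series is invalid after this call.
// If the split keeps the final layer of the original stack in *start and that
// layer is a softmax, it is converted to a tanh layer.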

// Appends the elements of the src series to this, removing from src and
// deleting it.
void Series::AppendSeries(Network* src) {
  ASSERT_HOST(src->type() == NT_SERIES);
  Series* src_series = static_cast<Series*>(src);
  for (int s = 0; s < src_series->stack_.size(); ++s) {
    AddToStack(src_series->stack_[s]);
    src_series->stack_[s] = NULL;
  }
  delete src;
}
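
// Illustrative usage sketch (hypothetical series objects): all layers of
// tail are moved onto the end of head's stack and tail is then deleted.
//   head->AppendSeries(tail);  // tail must be an NT_SERIES; invalid after.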

}  // namespace tesseract.