parallel.h 3.2 KB

  1. ///////////////////////////////////////////////////////////////////////
  2. // File: parallel.h
  3. // Description: Runs networks in parallel on the same input.
  4. // Author: Ray Smith
  5. // Created: Thu May 02 08:02:06 PST 2013
  6. //
  7. // (C) Copyright 2013, Google Inc.
  8. // Licensed under the Apache License, Version 2.0 (the "License");
  9. // you may not use this file except in compliance with the License.
  10. // You may obtain a copy of the License at
  11. // http://www.apache.org/licenses/LICENSE-2.0
  12. // Unless required by applicable law or agreed to in writing, software
  13. // distributed under the License is distributed on an "AS IS" BASIS,
  14. // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15. // See the License for the specific language governing permissions and
  16. // limitations under the License.
  17. ///////////////////////////////////////////////////////////////////////
  18. #ifndef TESSERACT_LSTM_PARALLEL_H_
  19. #define TESSERACT_LSTM_PARALLEL_H_
  20. #include "plumbing.h"
  21. namespace tesseract {
  22. // Runs multiple networks in parallel, interlacing their outputs.
  23. class Parallel : public Plumbing {
  24. public:
  25. // ni_ and no_ will be set by AddToStack.
  26. Parallel(const STRING& name, NetworkType type);
  27. ~Parallel() override = default;
  28. // Returns the shape output from the network given an input shape (which may
  29. // be partially unknown ie zero).
  30. StaticShape OutputShape(const StaticShape& input_shape) const override;
  31. STRING spec() const override {
  32. STRING spec;
  33. if (type_ == NT_PAR_2D_LSTM) {
  34. // We have 4 LSTMs operating in parallel here, so the size of each is
  35. // the number of outputs/4.
  36. spec.add_str_int("L2xy", no_ / 4);
  37. } else if (type_ == NT_PAR_RL_LSTM) {
  38. // We have 2 LSTMs operating in parallel here, so the size of each is
  39. // the number of outputs/2.
  40. if (stack_[0]->type() == NT_LSTM_SUMMARY)
  41. spec.add_str_int("Lbxs", no_ / 2);
  42. else
  43. spec.add_str_int("Lbx", no_ / 2);
  44. } else {
  45. if (type_ == NT_REPLICATED) {
  46. spec.add_str_int("R", stack_.size());
  47. spec += "(";
  48. spec += stack_[0]->spec();
  49. } else {
  50. spec = "(";
  51. for (int i = 0; i < stack_.size(); ++i) spec += stack_[i]->spec();
  52. }
  53. spec += ")";
  54. }
  55. return spec;
  56. }
  57. // Runs forward propagation of activations on the input line.
  58. // See Network for a detailed discussion of the arguments.
  59. void Forward(bool debug, const NetworkIO& input,
  60. const TransposedArray* input_transpose,
  61. NetworkScratch* scratch, NetworkIO* output) override;
  62. // Runs backward propagation of errors on the deltas line.
  63. // See Network for a detailed discussion of the arguments.
  64. bool Backward(bool debug, const NetworkIO& fwd_deltas,
  65. NetworkScratch* scratch,
  66. NetworkIO* back_deltas) override;
  67. private:
  68. // If *this is a NT_REPLICATED, then it feeds a replicated network with
  69. // identical inputs, and it would be extremely wasteful for them to each
  70. // calculate and store the same transpose of the inputs, so Parallel does it
  71. // and passes a pointer to the replicated network, allowing it to use the
  72. // transpose on the next call to Backward.
  73. TransposedArray transposed_input_;
  74. };
  75. } // namespace tesseract.
  76. #endif // TESSERACT_LSTM_PARALLEL_H_