tesseract  3.05.02
neuron.h
// Copyright 2008 Google Inc.
// All Rights Reserved.
// Author: ahmadab@google.com (Ahmad Abdulkader)
//
// neuron.h: Declarations of a class for an object that
// represents a single neuron in a neural network
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifndef NEURON_H
#define NEURON_H

#include <math.h>
#include <vector>

#ifdef USE_STD_NAMESPACE
using std::vector;
#endif

namespace tesseract {

// Input Node bias values
static const float kInputNodeBias = 0.0f;

class Neuron {
 public:
  // Types of nodes
  enum NeuronTypes {
    Unknown = 0,
    Input,
    Hidden,
    Output
  };
  Neuron();
  ~Neuron();
  // set the forward dirty flag indicating that the
  // activation of the net is not fresh
  void Clear() {
    frwd_dirty_ = true;
  }
  // Read a binary representation of the neuron info from
  // an input buffer.
  template <class BuffType> bool ReadBinary(BuffType *input_buff) {
    float val;
    if (input_buff->Read(&val, sizeof(val)) != sizeof(val)) {
      return false;
    }
    // input nodes should have no biases
    if (node_type_ == Input) {
      bias_ = kInputNodeBias;
    } else {
      bias_ = val;
    }
    // read fanin count
    int fan_in_cnt;
    if (input_buff->Read(&fan_in_cnt, sizeof(fan_in_cnt)) !=
        sizeof(fan_in_cnt)) {
      return false;
    }
    // validate fan-in cnt
    if (fan_in_cnt != fan_in_.size()) {
      return false;
    }
    // read the weights
    for (int in = 0; in < fan_in_cnt; in++) {
      if (input_buff->Read(&val, sizeof(val)) != sizeof(val)) {
        return false;
      }
      *(fan_in_weights_[in]) = val;
    }
    return true;
  }

  // Add new fan-in connections to this neuron *from*
  // the specified target neurons using the specified params.
  // Note that what is actually copied in this function are pointers to the
  // specified Neurons and weights and not the actual values. This is by
  // design to centralize the allocation of neurons and weights and so
  // increase the locality of reference and improve cache-hits resulting
  // in a faster net. This technique resulted in a 2X-10X speedup
  // (depending on network size and processor)
  void AddFromConnection(Neuron *neuron_vec,
                         float *wts_offset,
                         int from_cnt);
  // Set the type of a neuron
  void set_node_type(NeuronTypes type);
  // Computes the output of the node by
  // "pulling" the output of the fan-in nodes
  void FeedForward();
  // fast computation of sigmoid function using a lookup table
  // defined in sigmoid_table.cpp
  static float Sigmoid(float activation);
  // Accessor functions
  float output() const {
    return output_;
  }
  void set_output(float out_val) {
    output_ = out_val;
  }
  int id() const {
    return id_;
  }
  int fan_in_cnt() const {
    return fan_in_.size();
  }
  Neuron * fan_in(int idx) const {
    return fan_in_[idx];
  }
  float fan_in_wts(int idx) const {
    return *(fan_in_weights_[idx]);
  }
  void set_id(int id) {
    id_ = id;
  }
  float bias() const {
    return bias_;
  }
  NeuronTypes node_type() const {
    return node_type_;
  }

 protected:
  // Type of Neuron
  NeuronTypes node_type_;
  // unique id of the neuron
  int id_;
  // node bias
  float bias_;
  // node net activation
  float activation_;
  // node output
  float output_;
  // pointers to fanin nodes
  vector<Neuron *> fan_in_;
  // pointers to fanin weights
  vector<float *> fan_in_weights_;
  // Sigmoid function lookup table used for fast computation
  // of sigmoid function
  static const float kSigmoidTable[];
  // flag determining if the activation of the node
  // is fresh or not (dirty)
  bool frwd_dirty_;
  // Initializer
  void Init();
};
}

#endif  // NEURON_H
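The comment on AddFromConnection describes the pointer-sharing design: a Neuron never owns its fan-in neurons or weights, it only stores pointers into arrays allocated elsewhere. The snippet below is a minimal sketch of how a caller could wire one fully connected layer against the declared signature. The ConnectFully helper and its parameters are hypothetical and are not part of Tesseract, whose actual network-building code lives outside this header.

#include "neuron.h"

using tesseract::Neuron;

// Hypothetical helper: connect every neuron in prev (prev_cnt nodes) to
// every neuron in next (next_cnt nodes). The weights block must hold
// prev_cnt * next_cnt floats, outlive the neurons and never be
// reallocated, because each Neuron keeps raw pointers into it.
void ConnectFully(Neuron *prev, int prev_cnt,
                  Neuron *next, int next_cnt,
                  float *weights) {
  for (int n = 0; n < next_cnt; ++n) {
    // Each receiving neuron gets a contiguous slice of the shared block.
    next[n].AddFromConnection(prev, weights + n * prev_cnt, prev_cnt);
  }
}

Keeping all neurons and all weights in two contiguous blocks is what the header credits for the cache-locality speedup, and it also means the weights can later be filled in place, for example by ReadBinary, which writes through the shared pointers.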
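ReadBinary is templated on BuffType, and the only interface this header relies on is a Read(dest, byte_count) call that returns the number of bytes copied. The class below is a hypothetical in-memory stand-in for whatever buffer type Tesseract actually passes in; it is only meant to make the implied serialized layout concrete: the bias as a float, the fan-in count as an int, then one float per fan-in weight. The fan-in connections must already be in place (for example via AddFromConnection) before ReadBinary is called, because the stored count is validated against fan_in_.size().

#include <string.h>

// Hypothetical in-memory buffer; any type with a compatible Read() works.
class MemBuff {
 public:
  MemBuff(const char *data, int size) : data_(data), size_(size), pos_(0) {}
  // Copies up to `bytes` bytes into dest and returns how many were copied.
  int Read(void *dest, int bytes) {
    if (pos_ + bytes > size_) {
      bytes = size_ - pos_;
    }
    memcpy(dest, data_ + pos_, bytes);
    pos_ += bytes;
    return bytes;
  }

 private:
  const char *data_;
  int size_;
  int pos_;
};

With such a buffer, neuron.ReadBinary(&buff) returns false on a short read or when the stored fan-in count does not match the connections that were already wired.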
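FeedForward and the frwd_dirty_ flag implement lazy, pull-style evaluation: Clear() marks a node as stale, and asking a node for a fresh output makes it recursively pull fresh outputs from its fan-in nodes first. The real definition lives in neuron.cpp, not in this header; the version below is only an illustration of that pull pattern written against the members declared above, the sign convention for the bias term is an assumption, and it should not be compiled alongside the real neuron.cpp.

#include "neuron.h"

// Illustration only; the shipped definition is in neuron.cpp.
void tesseract::Neuron::FeedForward() {
  if (!frwd_dirty_) {
    return;  // output_ is still fresh, nothing to recompute
  }
  // Input nodes keep whatever value was injected via set_output().
  if (node_type_ != Input) {
    activation_ = -bias_;  // assumed sign convention for the bias term
    for (int in = 0; in < fan_in_cnt(); ++in) {
      // Pull: make sure every fan-in node has a fresh output first.
      fan_in_[in]->FeedForward();
      activation_ += (*fan_in_weights_[in]) * fan_in_[in]->output();
    }
    output_ = Sigmoid(activation_);
  }
  frwd_dirty_ = false;
}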
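Sigmoid() is documented as a lookup-table approximation, with kSigmoidTable defined in sigmoid_table.cpp. The table size, input range and indexing scheme Tesseract uses are not visible from this header, so the sketch below only shows the general technique under assumed parameters: precompute 1/(1 + e^-x) over a clamped range and interpolate between neighbouring entries at lookup time.

#include <math.h>

// Generic table-based sigmoid sketch. The table size, clamp range and
// linear interpolation are assumptions, not the layout of kSigmoidTable.
static const int kTableSize = 1024;
static const float kClamp = 10.0f;  // table spans [-kClamp, +kClamp]
static float sigmoid_table[kTableSize];

static void InitSigmoidTable() {
  for (int i = 0; i < kTableSize; ++i) {
    float x = -kClamp + (2.0f * kClamp * i) / (kTableSize - 1);
    sigmoid_table[i] = 1.0f / (1.0f + exp(-x));
  }
}

static float FastSigmoid(float activation) {
  if (activation <= -kClamp) return 0.0f;
  if (activation >= kClamp) return 1.0f;
  float pos = (activation + kClamp) * (kTableSize - 1) / (2.0f * kClamp);
  int idx = static_cast<int>(pos);
  if (idx >= kTableSize - 1) return sigmoid_table[kTableSize - 1];
  // Linear interpolation between the two surrounding table entries.
  float frac = pos - idx;
  return sigmoid_table[idx] +
         frac * (sigmoid_table[idx + 1] - sigmoid_table[idx]);
}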