MaskLayer.cpp

/*
 * MaskLayer.cpp
 *
 * Created on: Mar 21, 2014
 * Author: slundquist
 */

#include "MaskLayer.hpp"

namespace PV {

MaskLayer::MaskLayer(const char *name, HyPerCol *hc) {
   initialize_base();
   initialize(name, hc);
}

MaskLayer::MaskLayer() {
   initialize_base();
   // initialize() gets called by subclass's initialize method
}

MaskLayer::~MaskLayer() {
   if (maskLayerName) {
      free(maskLayerName);
   }
   if (features) {
      free(features);
   }
   if (maskMethod) {
      free(maskMethod);
   }
}
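// Note: maskLayerName, maskMethod, and the features array freed above are
// allocated by the params I/O routines (ioParamStringRequired and ioParamArray)
// called from the ioParam_* methods below, which is why they are released with free().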

int MaskLayer::initialize_base() {
   maskLayerName = NULL;
   maskLayer = NULL;
   maskMethod = NULL;
   features = NULL;

   return PV_SUCCESS;
}

int MaskLayer::ioParamsFillGroup(enum ParamsIOFlag ioFlag) {
   int status = ANNLayer::ioParamsFillGroup(ioFlag);
   ioParam_maskMethod(ioFlag);
   ioParam_maskLayerName(ioFlag);
   ioParam_featureIdxs(ioFlag);
   return status;
}

void MaskLayer::ioParam_maskMethod(enum ParamsIOFlag ioFlag) {
   parent->parameters()->ioParamStringRequired(ioFlag, name, "maskMethod", &maskMethod);
   // Check valid methods
   if (strcmp(maskMethod, "layer") == 0) {
   }
   else if (strcmp(maskMethod, "invertLayer") == 0) {
   }
   else if (strcmp(maskMethod, "maskFeatures") == 0) {
   }
   else if (strcmp(maskMethod, "noMaskFeatures") == 0) {
   }
   else {
      if (parent->columnId() == 0) {
         ErrorLog().printf(
               "%s: \"%s\" is not a valid maskMethod. Options are \"layer\", \"invertLayer\", "
               "\"maskFeatures\", or \"noMaskFeatures\".\n",
               getDescription_c(),
               maskMethod);
      }
      exit(-1);
   }
}

void MaskLayer::ioParam_maskLayerName(enum ParamsIOFlag ioFlag) {
   assert(!parent->parameters()->presentAndNotBeenRead(name, "maskMethod"));
   if (strcmp(maskMethod, "layer") == 0 || strcmp(maskMethod, "invertLayer") == 0) {
      parent->parameters()->ioParamStringRequired(ioFlag, name, "maskLayerName", &maskLayerName);
   }
}

void MaskLayer::ioParam_featureIdxs(enum ParamsIOFlag ioFlag) {
   assert(!parent->parameters()->presentAndNotBeenRead(name, "maskMethod"));
   if (strcmp(maskMethod, "maskFeatures") == 0 || strcmp(maskMethod, "noMaskFeatures") == 0) {
      parent->parameters()->ioParamArray(
            ioFlag, name, "featureIdxs", &features, &numSpecifiedFeatures);
      if (numSpecifiedFeatures == 0) {
         if (parent->columnId() == 0) {
            ErrorLog().printf(
                  "%s: MaskLayer must specify at least one feature for maskMethod \"%s\".\n",
                  getDescription_c(),
                  maskMethod);
         }
         exit(-1);
      }
   }
}

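// The parameters above are read from the params file for each MaskLayer group.
// A hypothetical params excerpt (illustrative only; the group and layer names
// below are made up, not taken from any PetaVision example) might look like:
//
//    MaskLayer "OutputMasked" = {
//       // ... usual ANNLayer parameters ...
//       maskMethod  = "maskFeatures";
//       featureIdxs = [0, 3];   // features to zero out
//    };
//
// With maskMethod = "layer" or "invertLayer", maskLayerName would be required
// instead of featureIdxs, e.g. maskLayerName = "MaskSource";
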
Response::Status
MaskLayer::communicateInitInfo(std::shared_ptr<CommunicateInitInfoMessage const> message) {
   auto status = ANNLayer::communicateInitInfo(message);
   if (!Response::completed(status)) {
      return status;
   }
   if (strcmp(maskMethod, "layer") == 0 || strcmp(maskMethod, "invertLayer") == 0) {
      maskLayer = message->lookup<HyPerLayer>(std::string(maskLayerName));
      if (maskLayer == NULL) {
         if (parent->columnId() == 0) {
            ErrorLog().printf(
                  "%s: maskLayerName \"%s\" is not a layer in the HyPerCol.\n",
                  getDescription_c(),
                  maskLayerName);
         }
         MPI_Barrier(parent->getCommunicator()->communicator());
         exit(EXIT_FAILURE);
      }

      const PVLayerLoc *maskLoc = maskLayer->getLayerLoc();
      const PVLayerLoc *loc = getLayerLoc();
      assert(maskLoc != NULL && loc != NULL);
      if (maskLoc->nxGlobal != loc->nxGlobal || maskLoc->nyGlobal != loc->nyGlobal) {
         if (parent->columnId() == 0) {
            ErrorLog(errorMessage);
            errorMessage.printf(
                  "%s: maskLayerName \"%s\" does not have the same x and y dimensions.\n",
                  getDescription_c(),
                  maskLayerName);
            errorMessage.printf(
                  "    original (nx=%d, ny=%d, nf=%d) versus (nx=%d, ny=%d, nf=%d)\n",
                  maskLoc->nxGlobal,
                  maskLoc->nyGlobal,
                  maskLoc->nf,
                  loc->nxGlobal,
                  loc->nyGlobal,
                  loc->nf);
         }
         MPI_Barrier(parent->getCommunicator()->communicator());
         exit(EXIT_FAILURE);
      }

      if (maskLoc->nf != 1 && maskLoc->nf != loc->nf) {
         if (parent->columnId() == 0) {
            ErrorLog(errorMessage);
            errorMessage.printf(
                  "%s: maskLayerName \"%s\" must either have the same number of features as this "
                  "layer, or one feature.\n",
                  getDescription_c(),
                  maskLayerName);
            errorMessage.printf(
                  "    original (nx=%d, ny=%d, nf=%d) versus (nx=%d, ny=%d, nf=%d)\n",
                  maskLoc->nxGlobal,
                  maskLoc->nyGlobal,
                  maskLoc->nf,
                  loc->nxGlobal,
                  loc->nyGlobal,
                  loc->nf);
         }
         MPI_Barrier(parent->getCommunicator()->communicator());
         exit(EXIT_FAILURE);
      }

      assert(maskLoc->nx == loc->nx && maskLoc->ny == loc->ny);
   }
   else {
      // Check for in bounds featureIdxs
      assert(features);
      const PVLayerLoc *loc = getLayerLoc();
      for (int f = 0; f < numSpecifiedFeatures; f++) {
         if (features[f] < 0 || features[f] >= loc->nf) {
            Fatal() << "Specified feature " << features[f] << " out of bounds\n";
         }
      }
   }

   return Response::SUCCESS;
}

Response::Status MaskLayer::updateState(double time, double dt) {
   ANNLayer::updateState(time, dt);

   float *A = getCLayer()->activity->data;
   float *V = getV();
   int num_channels = getNumChannels();
   float *gSynHead = GSyn == NULL ? NULL : GSyn[0];
   const PVLayerLoc *loc = getLayerLoc();

   int nx = loc->nx;
   int ny = loc->ny;
   int nf = loc->nf;
   int num_neurons = nx * ny * nf;
   int nbatch = loc->nbatch;

   int method = -1;
   const int METHOD_LAYER = 0;
   const int METHOD_INVERT_LAYER = 1;
   const int METHOD_FEATURES = 2;
   const int METHOD_INVERT_FEATURES = 3;

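   // Map the maskMethod string onto one of the constants above. As implemented
   // in the switch statement below, the methods behave as follows:
   //   "layer"          - zero this layer's activity wherever the mask layer's activity is zero
   //   "invertLayer"    - zero this layer's activity wherever the mask layer's activity is nonzero
   //   "maskFeatures"   - zero the activity of the features listed in featureIdxs
   //   "noMaskFeatures" - zero the activity of every feature not listed in featureIdxs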
   if (strcmp(maskMethod, "layer") == 0) {
      method = METHOD_LAYER;
   }
   else if (strcmp(maskMethod, "invertLayer") == 0) {
      method = METHOD_INVERT_LAYER;
   }
   else if (strcmp(maskMethod, "maskFeatures") == 0) {
      method = METHOD_FEATURES;
   }
   else if (strcmp(maskMethod, "noMaskFeatures") == 0) {
      method = METHOD_INVERT_FEATURES;
   }

   for (int b = 0; b < nbatch; b++) {
      float *ABatch = A + b * getNumExtended();
#ifdef PV_USE_OPENMP_THREADS
#pragma omp parallel for
#endif
      for (int ni = 0; ni < num_neurons; ni++) {
         int kThisRes = ni;
         int kThisExt = kIndexExtended(
               ni, nx, ny, nf, loc->halo.lt, loc->halo.rt, loc->halo.dn, loc->halo.up);
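         // kIndexExtended converts the restricted-frame index ni into the corresponding
         // index in the extended (halo-padded) activity buffer, so ABatch can be
         // indexed with kThisExt below.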
         float maskVal = 1;

         switch (method) {
            case METHOD_LAYER: {
               const PVLayerLoc *maskLoc = maskLayer->getLayerLoc();
               float *maskActivity = maskLayer->getActivity();
               float *maskActivityBatch = maskActivity + b * maskLayer->getNumExtended();
               int kMaskRes;
               if (maskLoc->nf == 1) {
                  kMaskRes = ni / nf;
               }
               else {
                  kMaskRes = ni;
               }
               int kMaskExt = kIndexExtended(
                     kMaskRes,
                     nx,
                     ny,
                     maskLoc->nf,
                     maskLoc->halo.lt,
                     maskLoc->halo.rt,
                     maskLoc->halo.dn,
                     maskLoc->halo.up);
               maskVal = maskActivityBatch[kMaskExt];
            } break;
            case METHOD_INVERT_LAYER: {
               const PVLayerLoc *maskLoc = maskLayer->getLayerLoc();
               float *maskActivity = maskLayer->getActivity();
               float *maskActivityBatch = maskActivity + b * maskLayer->getNumExtended();
               int kMaskRes;
               if (maskLoc->nf == 1) {
                  kMaskRes = ni / nf;
               }
               else {
                  kMaskRes = ni;
               }
               int kMaskExt = kIndexExtended(
                     kMaskRes,
                     nx,
                     ny,
                     maskLoc->nf,
                     maskLoc->halo.lt,
                     maskLoc->halo.rt,
                     maskLoc->halo.dn,
                     maskLoc->halo.up);
               if (maskActivityBatch[kMaskExt]) {
                  maskVal = 0;
               }
            } break;
            case METHOD_FEATURES: {
               // Calculate feature index of ni
               int featureNum = featureIndex(ni, nx, ny, nf);
               maskVal = 1; // If nothing specified, copy everything
               for (int specF = 0; specF < numSpecifiedFeatures; specF++) {
                  if (featureNum == features[specF]) {
                     maskVal = 0;
                     break;
                  }
               }
            } break;
            case METHOD_INVERT_FEATURES: {
               // Calculate feature index of ni
               int featureNum = featureIndex(ni, nx, ny, nf);
               maskVal = 0; // If nothing specified, copy nothing
               for (int specF = 0; specF < numSpecifiedFeatures; specF++) {
                  if (featureNum == features[specF]) {
                     maskVal = 1;
                     break;
                  }
               }
            } break;
            default: break;
         }

         // Set the value to 0; otherwise ANNLayer::updateState has already taken care of it
         if (maskVal == 0) {
            ABatch[kThisExt] = 0;
         }
      }
   }
   return Response::SUCCESS;
}

} /* namespace PV */