AbstractNormProbe.cpp
/*
 * AbstractNormProbe.cpp
 *
 *  Created on: Aug 11, 2015
 *      Author: pschultz
 */

#include "AbstractNormProbe.hpp"
#include "columns/HyPerCol.hpp"
#include "layers/HyPerLayer.hpp"
#include <limits>

namespace PV {

AbstractNormProbe::AbstractNormProbe() : LayerProbe() { initialize_base(); }

AbstractNormProbe::AbstractNormProbe(const char *name, HyPerCol *hc) : LayerProbe() {
   initialize_base();
   initialize(name, hc);
}

AbstractNormProbe::~AbstractNormProbe() {
   free(normDescription);
   normDescription = NULL;
   free(maskLayerName);
   maskLayerName = NULL;
   // Don't free maskLayer, which belongs to the HyPerCol.
}

int AbstractNormProbe::initialize_base() {
   normDescription   = NULL;
   maskLayerName     = NULL;
   maskLayer         = NULL;
   singleFeatureMask = false;
   timeLastComputed  = -std::numeric_limits<double>::infinity();
   return PV_SUCCESS;
}

int AbstractNormProbe::initialize(const char *name, HyPerCol *hc) {
   int status = LayerProbe::initialize(name, hc);
   if (status == PV_SUCCESS) {
      status = setNormDescription();
   }
   return status;
}

int AbstractNormProbe::ioParamsFillGroup(enum ParamsIOFlag ioFlag) {
   int status = LayerProbe::ioParamsFillGroup(ioFlag);
   ioParam_maskLayerName(ioFlag);
   return status;
}

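// maskLayerName: specifies an optional masking layer to use when calculating the norm.
// When blank (the default), no masking layer is used.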
void AbstractNormProbe::ioParam_maskLayerName(enum ParamsIOFlag ioFlag) {
   parent->parameters()->ioParamString(
         ioFlag, name, "maskLayerName", &maskLayerName, NULL, false /*warnIfAbsent*/);
}

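// Resolves maskLayerName to a layer and verifies that the mask is compatible with the
// target layer: the global x and y dimensions must match, and the mask must have either
// the same number of features as the target layer or exactly one feature. If the mask
// has one feature and the target layer has several, singleFeatureMask is set.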
Response::Status
AbstractNormProbe::communicateInitInfo(std::shared_ptr<CommunicateInitInfoMessage const> message) {
   auto status = LayerProbe::communicateInitInfo(message);
   if (!Response::completed(status)) {
      return status;
   }
   assert(targetLayer);
   if (maskLayerName && maskLayerName[0]) {
      maskLayer = message->lookup<HyPerLayer>(std::string(maskLayerName));
      if (maskLayer == NULL) {
         if (parent->columnId() == 0) {
            ErrorLog().printf(
                  "%s: maskLayerName \"%s\" is not a layer in the HyPerCol.\n",
                  getDescription_c(),
                  maskLayerName);
         }
         MPI_Barrier(parent->getCommunicator()->communicator());
         exit(EXIT_FAILURE);
      }

      const PVLayerLoc *maskLoc = maskLayer->getLayerLoc();
      const PVLayerLoc *loc     = targetLayer->getLayerLoc();
      assert(maskLoc != NULL && loc != NULL);
      if (maskLoc->nxGlobal != loc->nxGlobal || maskLoc->nyGlobal != loc->nyGlobal) {
         if (parent->columnId() == 0) {
            ErrorLog(maskLayerBadSize);
            maskLayerBadSize.printf(
                  "%s: maskLayerName \"%s\" does not have the "
                  "same x and y dimensions.\n",
                  getDescription_c(),
                  maskLayerName);
            maskLayerBadSize.printf(
                  "    original (nx=%d, ny=%d, nf=%d) versus (nx=%d, ny=%d, nf=%d)\n",
                  maskLoc->nxGlobal,
                  maskLoc->nyGlobal,
                  maskLoc->nf,
                  loc->nxGlobal,
                  loc->nyGlobal,
                  loc->nf);
         }
         MPI_Barrier(parent->getCommunicator()->communicator());
         exit(EXIT_FAILURE);
      }

      if (maskLoc->nf != 1 && maskLoc->nf != loc->nf) {
         if (parent->columnId() == 0) {
            ErrorLog(maskLayerBadSize);
            maskLayerBadSize.printf(
                  "%s: maskLayerName \"%s\" must either have the "
                  "same number of features as this "
                  "layer, or one feature.\n",
                  getDescription_c(),
                  maskLayerName);
            maskLayerBadSize.printf(
                  "    original (nx=%d, ny=%d, nf=%d) versus (nx=%d, ny=%d, nf=%d)\n",
                  maskLoc->nxGlobal,
                  maskLoc->nyGlobal,
                  maskLoc->nf,
                  loc->nxGlobal,
                  loc->nyGlobal,
                  loc->nf);
         }
         MPI_Barrier(parent->getCommunicator()->communicator());
         exit(EXIT_FAILURE);
      }
      assert(maskLoc->nx == loc->nx && maskLoc->ny == loc->ny);
      singleFeatureMask = maskLoc->nf == 1 && loc->nf != 1;
   }
   return Response::SUCCESS;
}

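// setNormDescription() lets a subclass name the quantity it computes (for example,
// an L2-norm probe would set "L2-norm"); the base class uses the generic label "norm".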
int AbstractNormProbe::setNormDescription() { return setNormDescriptionToString("norm"); }

int AbstractNormProbe::setNormDescriptionToString(char const *s) {
   normDescription = strdup(s);
   return normDescription ? PV_SUCCESS : PV_FAILURE;
}

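// Each MPI process fills valuesBuffer with its local contribution to each batch element's
// value via getValueInternal(); MPI_Allreduce then sums the contributions across processes
// so that every process ends up with the same global values.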
void AbstractNormProbe::calcValues(double timeValue) {
   double *valuesBuffer = this->getValuesBuffer();
   for (int b = 0; b < this->getNumValues(); b++) {
      valuesBuffer[b] = getValueInternal(timeValue, b);
   }
   MPI_Allreduce(
         MPI_IN_PLACE,
         valuesBuffer,
         getNumValues(),
         MPI_DOUBLE,
         MPI_SUM,
         parent->getCommunicator()->communicator());
}

Response::Status AbstractNormProbe::outputState(double timevalue) {
   getValues(timevalue);
   double *valuesBuffer = this->getValuesBuffer();
   if (!mOutputStreams.empty()) {
      int nBatch = getNumValues();
      int nk     = getTargetLayer()->getNumGlobalNeurons();
      for (int b = 0; b < nBatch; b++) {
         output(b).printf("%6.3f, %d, %8d, %f", timevalue, b, nk, valuesBuffer[b]);
         output(b) << std::endl;
      }
   }
   return Response::SUCCESS;
}

} // end namespace PV
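For orientation, below is a minimal sketch (not part of this file) of how a subclass is intended to plug into AbstractNormProbe: it overrides setNormDescription() to label the reported value and getValueInternal() to compute one process's contribution for one batch element, which calcValues() above then sums across MPI processes. The class name L1NormProbeSketch is hypothetical; the sketch assumes that getValueInternal(double, int) is the virtual hook declared in AbstractNormProbe.hpp, that each batch element occupies getNumExtended() entries of the buffer returned by HyPerLayer::getLayerData(), and that kIndexExtended() from utils/conversions.h and <cmath> are available. It ignores any maskLayer; a real subclass would honor it.

// Hypothetical example; would live inside namespace PV.
class L1NormProbeSketch : public AbstractNormProbe {
  public:
   L1NormProbeSketch(const char *name, HyPerCol *hc) : AbstractNormProbe() {
      // Call initialize() from the derived constructor body so that the virtual
      // setNormDescription() call inside initialize() dispatches to this class.
      initialize(name, hc);
   }

  protected:
   // Label the reported quantity; AbstractNormProbe::initialize() calls this.
   virtual int setNormDescription() override { return setNormDescriptionToString("L1-norm"); }

   // One process's contribution for one batch element; calcValues() reduces these over MPI.
   virtual double getValueInternal(double timeValue, int batchIndex) override {
      HyPerLayer *layer     = getTargetLayer();
      PVLayerLoc const *loc = layer->getLayerLoc();
      // Assumed layout: batch element batchIndex starts at offset batchIndex * getNumExtended()
      // in the extended activity buffer.
      float const *activity = layer->getLayerData() + batchIndex * layer->getNumExtended();
      double sum            = 0.0;
      for (int k = 0; k < layer->getNumNeurons(); k++) {
         int kExt = kIndexExtended(
               k, loc->nx, loc->ny, loc->nf, loc->halo.lt, loc->halo.rt, loc->halo.dn, loc->halo.up);
         sum += std::fabs((double)activity[kExt]);
      }
      return sum;
   }
};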