// NOTE(review): this chunk is extraction-garbled — original file line numbers
// ("8", "9", "15", ...) are fused into the text and many original lines are
// missing. Code is kept byte-identical; comments only.
// Includes: own header first, then HyPerCol and HyPerLayer (both used below,
// e.g. in the communicateInitInfo mask lookup).
// Default constructor: delegates all member defaulting to initialize_base().
8 #include "AbstractNormProbe.hpp" 9 #include "columns/HyPerCol.hpp" 10 #include "layers/HyPerLayer.hpp" 15 AbstractNormProbe::AbstractNormProbe() : LayerProbe() { initialize_base(); }
// Constructor taking the probe's name and its parent HyPerCol.
// NOTE(review): the body (original lines 18-21) is missing from this view —
// presumably it calls initialize_base() and initialize(name, hc), mirroring
// the default constructor and initialize() below; confirm against the repo.
17 AbstractNormProbe::AbstractNormProbe(
const char *name, HyPerCol *hc) : LayerProbe() {
// Destructor: releases the strdup'd normDescription string (allocated in
// setNormDescriptionToString).
22 AbstractNormProbe::~AbstractNormProbe() {
23 free(normDescription);
24 normDescription = NULL; // defensive: avoid a dangling pointer after free
// initialize_base: set members to safe defaults before any params are read.
30 int AbstractNormProbe::initialize_base() {
31 normDescription = NULL;
// NOTE(review): original lines 32-33 are missing here — likely the
// maskLayerName/maskLayer defaults; confirm against the repo.
34 singleFeatureMask =
false;
// Sentinel meaning "no value has been computed yet".
35 timeLastComputed = -std::numeric_limits<double>::infinity();
// initialize fragment.
// NOTE(review): original line 40 is missing — presumably
// `int status = LayerProbe::initialize(name, hc);` feeding the check below.
39 int AbstractNormProbe::initialize(
const char *name, HyPerCol *hc) {
41 if (status == PV_SUCCESS) {
// Subclasses supply the human-readable norm name via setNormDescription().
42 status = setNormDescription();
// ioParamsFillGroup fragment: reads this probe's params, including the
// optional maskLayerName below.
49 ioParam_maskLayerName(ioFlag);
// ioParam_maskLayerName: optional string param; defaults to NULL (no mask).
// The final `false` argument suppresses the warn-if-absent behavior.
54 parent->parameters()->ioParamString(
55 ioFlag, name,
"maskLayerName", &maskLayerName, NULL,
false );
// communicateInitInfo fragment: if a mask layer was named, resolve it to a
// HyPerLayer and validate that its geometry is compatible with the target
// layer. NOTE(review): several original lines (error-message arguments and
// the exit/Fatal calls after each MPI_Barrier) are missing from this view;
// code kept byte-identical.
65 if (maskLayerName && maskLayerName[0]) {
66 maskLayer = message->lookup<
HyPerLayer>(std::string(maskLayerName));
// Error path: the named mask layer does not exist in the HyPerCol.
67 if (maskLayer == NULL) {
68 if (parent->columnId() == 0) { // only rank 0 prints the message
70 "%s: maskLayerName \"%s\" is not a layer in the HyPerCol.\n",
// Barrier so all ranks reach the (missing) abort together.
74 MPI_Barrier(parent->getCommunicator()->communicator());
78 const PVLayerLoc *maskLoc = maskLayer->getLayerLoc();
79 const PVLayerLoc *loc = targetLayer->getLayerLoc();
80 assert(maskLoc != NULL && loc != NULL);
// Mask must match the target layer's global x/y dimensions exactly.
81 if (maskLoc->nxGlobal != loc->nxGlobal || maskLoc->nyGlobal != loc->nyGlobal) {
82 if (parent->columnId() == 0) {
83 ErrorLog(maskLayerBadSize);
84 maskLayerBadSize.printf(
85 "%s: maskLayerName \"%s\" does not have the " 86 "same x and y dimensions.\n",
89 maskLayerBadSize.printf(
90 " original (nx=%d, ny=%d, nf=%d) versus (nx=%d, ny=%d, nf=%d)\n",
98 MPI_Barrier(parent->getCommunicator()->communicator());
// Feature count: the mask must have either exactly one feature (applied to
// every feature of the target) or the same feature count as the target.
102 if (maskLoc->nf != 1 && maskLoc->nf != loc->nf) {
103 if (parent->columnId() == 0) {
104 ErrorLog(maskLayerBadSize);
105 maskLayerBadSize.printf(
106 "%s: maskLayerName \"%s\" must either have the " 107 "same number of features as this " 108 "layer, or one feature.\n",
111 maskLayerBadSize.printf(
112 " original (nx=%d, ny=%d, nf=%d) versus (nx=%d, ny=%d, nf=%d)\n",
120 MPI_Barrier(parent->getCommunicator()->communicator());
// Global x/y equality plus matching decomposition implies local equality.
123 assert(maskLoc->nx == loc->nx && maskLoc->ny == loc->ny);
// Remember whether a single mask feature is broadcast across all features.
124 singleFeatureMask = maskLoc->nf == 1 && loc->nf != 1;
126 return Response::SUCCESS;
// setNormDescriptionToString tail: copy s into normDescription (owned by this
// probe; freed in the destructor).
132 normDescription = strdup(s);
133 return normDescription ? PV_SUCCESS : PV_FAILURE; // strdup returns NULL on OOM
// calcValues fragment: compute one norm value per batch element via the
// subclass hook getValueInternal().
137 double *valuesBuffer = this->getValuesBuffer();
138 for (
int b = 0; b < this->getNumValues(); b++) {
139 valuesBuffer[b] = getValueInternal(timeValue, b);
// NOTE(review): original lines 140-146 are missing — the dangling
// communicator() argument below suggests an MPI reduction (e.g.
// MPI_Allreduce of valuesBuffer across ranks); confirm against the repo.
147 parent->getCommunicator()->communicator());
// outputState fragment: refresh the per-batch norm values for timevalue, then
// write one line per batch element: time, batch index, global neuron count,
// and the norm value.
151 getValues(timevalue);
152 double *valuesBuffer = this->getValuesBuffer();
153 if (!mOutputStreams.empty()) { // only processes that own output streams print
154 int nBatch = getNumValues();
155 int nk = getTargetLayer()->getNumGlobalNeurons();
156 for (
int b = 0; b < nBatch; b++) {
157 output(b).printf(
"%6.3f, %d, %8d, %f", timevalue, b, nk, valuesBuffer[b]);
158 output(b) << std::endl;
161 return Response::SUCCESS;
virtual void calcValues(double timeValue) override
virtual Response::Status communicateInitInfo(std::shared_ptr<CommunicateInitInfoMessage const> message) override
virtual Response::Status outputState(double timevalue) override
virtual int ioParamsFillGroup(enum ParamsIOFlag ioFlag) override
virtual void ioParam_maskLayerName(enum ParamsIOFlag ioFlag)
maskLayerName: Specifies a masking layer to use when calculating the norm. When blank (the default), no mask layer is used. A mask layer must have the same global x and y dimensions as the target layer, and either the same number of features as the target layer or exactly one feature (which is then applied to every feature).
virtual int ioParamsFillGroup(enum ParamsIOFlag ioFlag) override
static bool completed(Status &a)
int setNormDescriptionToString(char const *s)
int initialize(const char *name, HyPerCol *hc)
virtual Response::Status communicateInitInfo(std::shared_ptr< CommunicateInitInfoMessage const > message) override
virtual int setNormDescription()