Skip to content

Commit cd94ec5

Browse files
committed
IMPROVED: Removed redundant nullification of deleted pointers
The previous code set pointers to `NULL` after `delete[]`, which was unnecessary in some parts of the code (the owning object is destroyed immediately afterwards, so the pointers are never read again).
1 parent 3a45ba2 commit cd94ec5

1 file changed

Lines changed: 0 additions & 15 deletions

File tree

src/NeuralNetwork.h

Lines changed: 0 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -1077,59 +1077,50 @@ class NeuralNetwork
10771077
for (int j = 0; j < layers[i]._numberOfOutputs; j++) // because of this i wont make _numberOfOutputs/inputs private :/ or maybe.. i ll see... or i will change them to const* ... what? i've just read it again lol
10781078
{
10791079
delete[] layers[i].weights[j];
1080-
layers[i].weights[j] = NULL;
10811080
}
10821081
#endif
10831082

10841083
// #if !defined(USE_PROGMEM)
10851084
#if !defined(NO_BIAS)
10861085
delete layers[i].bias;
1087-
layers[i].bias = NULL;
10881086
#endif
10891087
// #endif
10901088

10911089
#if !defined(REDUCE_RAM_DELETE_OUTPUTS)
10921090
delete[] layers[i].outputs;
1093-
layers[i].outputs = NULL;
10941091
#endif
10951092

10961093
/*
10971094
#if defined(REDUCE_RAM_WEIGHTS_LVL1) // && !defined(USE_PROGMEM) // no need for progmem condition because progmem is never going to be initialized with new
10981095
delete[] layers[i].weights;
1099-
layers[i].weights = NULL;
11001096
#endif
11011097
*/
11021098
}
11031099

11041100
#if defined(REDUCE_RAM_WEIGHTS_LVL2) // && !defined(USE_PROGMEM) // no need for progmem condition because progmem is never going to be initialized with new
11051101
delete weights;
1106-
weights = NULL;
11071102
#endif
11081103
}else{
11091104
#if !defined(REDUCE_RAM_DELETE_OUTPUTS)
11101105
for (int i = 0; i < numberOflayers; i++){
11111106
delete[] layers[i].outputs;
1112-
layers[i].outputs = NULL;
11131107
}
11141108
#endif
11151109
}
11161110
#elif !defined(REDUCE_RAM_DELETE_OUTPUTS)
11171111
for (int i = 0; i < numberOflayers; i++){
11181112
delete[] layers[i].outputs;
1119-
layers[i].outputs = NULL;
11201113
}
11211114
#endif
11221115

11231116
#if defined(ACTIVATION__PER_LAYER) && defined(SUPPORTS_SD_FUNCTIONALITY)
11241117
if (isAlreadyLoadedOnce){
11251118
delete[] ActFunctionPerLayer;
1126-
ActFunctionPerLayer = NULL;
11271119
}
11281120
#endif
11291121

11301122
if (numberOflayers !=0){
11311123
delete[] layers;
1132-
layers = NULL; // 18/5/2019
11331124
}
11341125
}
11351126
// Destructor: delegates all teardown to pdestract(), which (per the diff above)
// releases the per-layer weights, biases and outputs, and the layers array itself.
NeuralNetwork::~NeuralNetwork() { pdestract(); }
@@ -1381,7 +1372,6 @@ class NeuralNetwork
13811372
#endif
13821373
#if defined(REDUCE_RAM_DELETE_OUTPUTS)
13831374
delete[] layers[i - 1].outputs;
1384-
layers[i - 1].outputs = NULL;
13851375
#endif
13861376
}
13871377

@@ -1447,7 +1437,6 @@ class NeuralNetwork
14471437
#endif
14481438
#if defined(REDUCE_RAM_DELETE_OUTPUTS)
14491439
delete[] layers[i - 1].outputs;
1450-
layers[i - 1].outputs = NULL;
14511440
#endif
14521441
}
14531442
#if defined(USE_INTERNAL_EEPROM)
@@ -1478,16 +1467,12 @@ class NeuralNetwork
14781467
{
14791468
layers[i].BackPropHidden(&layers[i + 1], layers[i - 1].outputs);
14801469
delete[] layers[i + 1].preLgamma;
1481-
layers[i + 1].preLgamma = NULL; // 18/5/2019
14821470
}
14831471

14841472
layers[0].BackPropHidden(&layers[1], _inputs);
14851473

14861474
delete[] layers[1].preLgamma;
14871475
delete[] layers[0].preLgamma;
1488-
1489-
layers[0].preLgamma = NULL;
1490-
layers[1].preLgamma = NULL;
14911476
}
14921477
#endif
14931478

0 commit comments

Comments
 (0)