Fix memory leak
parent 2f39db3f52
commit 7668a249d9
 src/cltest.c | 16 ++++++++--------
@@ -53,22 +53,22 @@ void train(clm_NN nn, unsigned int numElements, clm_Vector *inputs, clm_Vector *
 			currentXs = nn.layers[i].output;
 		}
 
-		clm_Linear lastLayer = nn.layers[nn.numLayers - 1];
+		clm_Linear *lastLayer = &nn.layers[nn.numLayers - 1];
 		for(unsigned int b = 0; b < batchSize; b++) {
 			// Error of last layer = y - yhat
-			clm_matrixCopy(batchOutputs[b], lastLayer.error[b]); // lastLayer.error = y
-			clm_matrixSubtractMatrix(lastLayer.error[b], lastLayer.output[b]); // lastLayer.error -= yhat
+			clm_matrixCopy(batchOutputs[b], lastLayer->error[b]); // lastLayer.error = y
+			clm_matrixSubtractMatrix(lastLayer->error[b], lastLayer->output[b]); // lastLayer.error -= yhat
 		}
 
 		for(int i = nn.numLayers - 1; i >= 0; i--) {
-			clm_Linear layer = nn.layers[i];
+			clm_Linear *layer = &nn.layers[i];
 			clm_Matrix *inputsToThisLayer = i == 0 ? batchInputs : nn.layers[i - 1].output;
-			clm_Matrix *outputsOfThisLayer = layer.output;
-			clm_linearBackprop(&layer, nn.learnRate, batchSize, inputsToThisLayer, outputsOfThisLayer, layer.error, i > 0, i == 0 ? NULL : nn.layers[i - 1].error, layer.weightsError, layer.gradient);
+			clm_Matrix *outputsOfThisLayer = layer->output;
+			clm_linearBackprop(layer, nn.learnRate, batchSize, inputsToThisLayer, outputsOfThisLayer, layer->error, i > 0, i == 0 ? NULL : nn.layers[i - 1].error, layer->weightsError, layer->gradient);
 
 			for(unsigned int b = 0; b < batchSize; b++) {
-				clm_matrixAddMatrix(layer.weights, layer.weightsError[b]);
-				clm_matrixAddMatrix(layer.bias, layer.gradient[b]);
+				clm_matrixAddMatrix(layer->weights, layer->weightsError[b]);
+				clm_matrixAddMatrix(layer->bias, layer->gradient[b]);
			}
 		}
 
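
The change itself is mechanical: each clm_Linear is now taken by pointer (&nn.layers[...]) instead of by value, so clm_linearBackprop and the weight/bias updates act on the matrices owned by the network's own layers rather than on a stack copy, which is presumably where the leak came from. The sketch below only illustrates that general pattern under an assumption; Layer, fill_error_by_value and fill_error_by_pointer are hypothetical names, since the internals of clm_Linear and clm_linearBackprop are not part of this diff. Assuming the callee allocates buffers into the layer it receives, passing the struct by value loses those buffers when the copy goes out of scope, while passing a pointer keeps ownership with the caller.

#include <stdlib.h>
#include <string.h>

/* Hypothetical stand-in for a layer that owns a heap-allocated buffer. */
typedef struct {
	float *error;
	size_t n;
} Layer;

/* By value: the malloc'ed buffer is stored only in the local copy, which is
 * discarded on return, so the allocation becomes unreachable (a leak) and the
 * caller's layer never sees the result. */
static void fill_error_by_value(Layer l, size_t n) {
	l.error = malloc(n * sizeof *l.error);
	memset(l.error, 0, n * sizeof *l.error);
	l.n = n;
}

/* By pointer: the caller's layer keeps ownership of the buffer, so it can be
 * reused and freed later; nothing is lost when the function returns. */
static void fill_error_by_pointer(Layer *l, size_t n) {
	free(l->error);
	l->error = malloc(n * sizeof *l->error);
	memset(l->error, 0, n * sizeof *l->error);
	l->n = n;
}

int main(void) {
	Layer layer = { .error = NULL, .n = 0 };
	fill_error_by_value(layer, 8);    /* leaks 8 floats; layer.error is still NULL */
	fill_error_by_pointer(&layer, 8); /* layer itself now owns the buffer */
	free(layer.error);
	return 0;
}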