csci5521/quiz/5/myLReLULayer.m

classdef myLReLULayer < nnet.layer.Layer
    properties
        % Scaling coefficient applied to negative inputs (the leaky slope)
        Alpha
    end
    methods
        function layer = myLReLULayer(scale, name)
            % Create a myLReLULayer with the given negative-input
            % scaling coefficient and, optionally, a layer name
            if nargin == 2
                layer.Name = name;
            end

            % Set layer description
            layer.Description = ...
                ['myLReLULayer with scale ', num2str(scale)];
            layer.Alpha = scale;
        end
        function Z = predict(layer, X)
            % Forward pass: leaky ReLU. For 0 <= Alpha <= 1, positive
            % inputs pass through unchanged and negative inputs are
            % scaled by Alpha.
            Z = max(X, layer.Alpha*X);
        end
        function dLdX = backward(layer, X, Z, dLdZ, memory)
            % Backward pass: propagate the loss gradient through the
            % layer. The derivative of the leaky ReLU is 1 where X > 0
            % and Alpha elsewhere.
            dLdX = layer.Alpha*dLdZ;
            dLdX(X>0) = dLdZ(X>0);
        end
    end
end
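
A minimal usage sketch (not part of the original file), assuming MATLAB's Deep Learning Toolbox is available; the 0.1 slope, the test input, and the surrounding network layers are illustrative only:

% Construct the layer with a negative slope of 0.1 and a name.
lrelu = myLReLULayer(0.1, 'lrelu1');

% Forward a small test input: positive entries pass through
% unchanged, negative entries are scaled by Alpha.
X = [-2 -1 0 1 2];
Z = lrelu.predict(X);   % expected: [-0.2 -0.1 0 1 2]

% The layer can also be dropped into an ordinary layer array, e.g.
% between fully connected layers of a small classification network:
layers = [
    featureInputLayer(4)
    fullyConnectedLayer(8)
    myLReLULayer(0.1, 'lrelu1')
    fullyConnectedLayer(3)
    softmaxLayer
    classificationLayer];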