diff --git a/pascal/p001-Basic-Neuron-inputs.pas b/pascal/p001-Basic-Neuron-inputs.pas
new file mode 100644
index 0000000..ff04d61
--- /dev/null
+++ b/pascal/p001-Basic-Neuron-inputs.pas
@@ -0,0 +1,12 @@
+program NeuronExample;
+
+var
+  inputs: array[0..2] of Real = (1.2, 5.1, 2.1);
+  weights: array[0..2] of Real = (3.1, 2.1, 8.7);
+  bias: Real = 3.0;
+  output: Real;
+
+begin
+  output := inputs[0]*weights[0] + inputs[1]*weights[1] + inputs[2]*weights[2] + bias;
+  WriteLn(output:0:2);
+end.
diff --git a/pascal/p002-Basic-Neuron-Layer.pas b/pascal/p002-Basic-Neuron-Layer.pas
new file mode 100644
index 0000000..43fe8bb
--- /dev/null
+++ b/pascal/p002-Basic-Neuron-Layer.pas
@@ -0,0 +1,19 @@
+program Neuron;
+var
+  inputs: array[0..3] of Real = (1, 2, 3, 2.5);
+  weights1: array[0..3] of Real = (0.2, 0.8, -0.5, 1);
+  weights2: array[0..3] of Real = (0.5, -0.91, 0.26, -0.5);
+  weights3: array[0..3] of Real = (-0.26, -0.27, 0.17, 0.87);
+  bias1, bias2, bias3: Real;
+  output: array[0..2] of Real;
+begin
+  bias1 := 2;
+  bias2 := 3;
+  bias3 := 0.5;
+
+  output[0] := (inputs[0] * weights1[0] + inputs[1] * weights1[1] + inputs[2] * weights1[2] + inputs[3] * weights1[3]) + bias1;
+  output[1] := (inputs[0] * weights2[0] + inputs[1] * weights2[1] + inputs[2] * weights2[2] + inputs[3] * weights2[3]) + bias2;
+  output[2] := (inputs[0] * weights3[0] + inputs[1] * weights3[1] + inputs[2] * weights3[2] + inputs[3] * weights3[3]) + bias3;
+
+  WriteLn(output[0]:0:2, ' ', output[1]:0:2, ' ', output[2]:0:2);
+end.
diff --git a/pascal/p003-Dot-Product.pas b/pascal/p003-Dot-Product.pas
new file mode 100644
index 0000000..f9b235b
--- /dev/null
+++ b/pascal/p003-Dot-Product.pas
@@ -0,0 +1,19 @@
+program DotProduct;
+
+var
+  inputs: array[0..3] of Double = (1, 2, 3, 2.5);
+  weights: array[0..2, 0..3] of Double = ((0.2, 0.8, -0.5, 1), (0.5, -0.91, 0.26, -0.5), (-0.26, -0.27, 0.17, 0.87));
+  biases: array[0..2] of Double = (2, 3, 0.5);
+  output: array[0..2] of Double;
+  i, j: Integer;
+begin
+  for i := 0 to 2 do
+  begin
+    output[i] := biases[i];
+    for j := 0 to 3 do
+      output[i] := output[i] + weights[i, j] * inputs[j];
+  end;
+
+  for i := 0 to 2 do
+    WriteLn(output[i]:0:2);
+end.
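Note: the single neuron in p001 evaluates to 1.2*3.1 + 5.1*2.1 + 2.1*8.7 + 3.0 = 35.70, and p002/p003 repeat the same weighted-sum-plus-bias computation once per neuron. A minimal sketch of that shared step factored into a reusable helper (the Dot function below is hypothetical and not part of this diff):

program DotSketch;

{$mode objfpc}

{ Hypothetical helper: dot product of two equal-length arrays,
  the core operation behind p001-p003. }
function Dot(const a, b: array of Double): Double;
var
  i: Integer;
begin
  Result := 0.0;
  for i := 0 to High(a) do
    Result := Result + a[i] * b[i];
end;

begin
  // Reproduces p001: 1.2*3.1 + 5.1*2.1 + 2.1*8.7 + 3.0 = 35.70
  WriteLn((Dot([1.2, 5.1, 2.1], [3.1, 2.1, 8.7]) + 3.0):0:2);
end.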
diff --git a/pascal/p004-Layers-and-Object.pas b/pascal/p004-Layers-and-Object.pas
new file mode 100644
index 0000000..150539c
--- /dev/null
+++ b/pascal/p004-Layers-and-Object.pas
@@ -0,0 +1,69 @@
+program NeuralNetwork;
+
+{$mode objfpc}
+
+type
+  TMatrix = array of array of Double;
+
+  TLayerDense = class
+  private
+    weights: TMatrix;
+    biases: TMatrix;
+    output: TMatrix;
+  public
+    constructor Create(n_inputs, n_neurons: Integer);
+    procedure Forward(inputs: TMatrix);
+  end;
+
+constructor TLayerDense.Create(n_inputs, n_neurons: Integer);
+var
+  i, j: Integer;
+begin
+  SetLength(weights, n_inputs, n_neurons);
+  SetLength(biases, 1, n_neurons);
+  for i := 0 to n_inputs - 1 do
+    for j := 0 to n_neurons - 1 do
+      weights[i, j] := 0.10 * Random - 0.05;
+  FillChar(biases[0, 0], SizeOf(Double) * n_neurons, 0);
+end;
+
+procedure TLayerDense.Forward(inputs: TMatrix);
+var
+  i, j, k: Integer;
+begin
+  SetLength(output, Length(inputs), Length(weights[0]));
+  for i := 0 to Length(inputs) - 1 do
+    for j := 0 to Length(weights[0]) - 1 do
+    begin
+      output[i, j] := biases[0, j];
+      for k := 0 to Length(inputs[0]) - 1 do
+        output[i, j] := output[i, j] + inputs[i, k] * weights[k, j];
+    end;
+end;
+
+var
+  X: TMatrix;
+  layer1, layer2: TLayerDense;
+  i, j: Integer;
+begin
+  Randomize;
+
+  SetLength(X, 3, 4);
+  X[0] := [1.0, 2.0, 3.0, 2.5];
+  X[1] := [2.0, 5.0, -1.0, 2.0];
+  X[2] := [-1.5, 2.7, 3.3, -0.8];
+
+  layer1 := TLayerDense.Create(4, 5);
+  layer2 := TLayerDense.Create(5, 2);
+
+  layer1.Forward(X);
+  // layer1.output now holds the 3x5 hidden activations
+  layer2.Forward(layer1.output);
+
+  for i := 0 to Length(layer2.output) - 1 do
+  begin
+    for j := 0 to Length(layer2.output[0]) - 1 do
+      Write(layer2.output[i, j]:0:2, ' ');
+    WriteLn;
+  end;
end.
diff --git a/pascal/p005-ReLU-Activation.pas b/pascal/p005-ReLU-Activation.pas
new file mode 100644
index 0000000..bee6b57
--- /dev/null
+++ b/pascal/p005-ReLU-Activation.pas
@@ -0,0 +1,83 @@
+program SpiralData;
+
+uses
+  SysUtils, Math;
+
+type
+  TMatrix = array of array of Double;
+
+  TLayerDense = record
+    weights: TMatrix;
+    biases: TMatrix;
+    output: TMatrix;
+  end;
+
+  TActivationReLU = record
+    output: TMatrix;
+  end;
+
+procedure InitializeLayerDense(var layer: TLayerDense; nInputs, nNeurons: Integer);
+var
+  i, j: Integer;
+begin
+  SetLength(layer.weights, nInputs, nNeurons);
+  SetLength(layer.biases, 1, nNeurons);
+
+  for i := 0 to nInputs - 1 do
+    for j := 0 to nNeurons - 1 do
+      layer.weights[i, j] := 0.10 * Random - 0.05;
+
+  FillChar(layer.biases[0, 0], nNeurons * SizeOf(Double), 0);
+end;
+
+procedure ForwardLayerDense(var layer: TLayerDense; const inputs: TMatrix);
+var
+  i, j, k: Integer;
+begin
+  SetLength(layer.output, Length(inputs), Length(layer.biases[0]));
+
+  for i := 0 to High(inputs) do
+    for j := 0 to High(layer.biases[0]) do
+    begin
+      layer.output[i, j] := layer.biases[0, j];
+      for k := 0 to High(inputs[0]) do
+        layer.output[i, j] := layer.output[i, j] + inputs[i, k] * layer.weights[k, j];
+    end;
+end;
+
+procedure InitializeActivationReLU(var activation: TActivationReLU; const inputs: TMatrix);
+var
+  i, j: Integer;
+begin
+  SetLength(activation.output, Length(inputs), Length(inputs[0]));
+
+  for i := 0 to High(inputs) do
+    for j := 0 to High(inputs[0]) do
+      activation.output[i, j] := Max(0.0, inputs[i, j]);
+end;
+
+var
+  layer1: TLayerDense;
+  activation1: TActivationReLU;
+  X, y: TMatrix;
+  i, j: Integer;
+begin
+  Randomize;
+
+  SetLength(X, 100, 2);
+  SetLength(y, 100, 1);
+
+  // Assign values to X and y arrays
+
+  InitializeLayerDense(layer1, 2, 5);
+  ForwardLayerDense(layer1, X);
+
+  InitializeActivationReLU(activation1, layer1.output);
+
+  for i := 0 to High(activation1.output) do
+  begin
+    for j := 0 to High(activation1.output[0]) do
+      Write(activation1.output[i, j]:0:2, ' ');
+    WriteLn;
+  end;
+end.
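As committed, p005 leaves X zero-filled (the "Assign values to X and y arrays" placeholder), so the dense layer outputs only its zero biases and every ReLU value prints as 0.00. A sketch of one way to populate X so the clamping becomes visible (a hypothetical uniform filler, not the spiral-data generator the program name alludes to):

program FillInputsSketch;

var
  X: array of array of Double;
  i, j: Integer;
begin
  Randomize;
  SetLength(X, 100, 2);
  // Uniform samples in [-1, 1); ReLU will clamp the negative ones to 0.
  for i := 0 to High(X) do
    for j := 0 to High(X[0]) do
      X[i, j] := 2.0 * Random - 1.0;
  WriteLn(X[0, 0]:0:2, ' ', X[0, 1]:0:2);
end.

Each file compiles standalone with Free Pascal, e.g. fpc pascal/p005-ReLU-Activation.pas.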