diff --git a/CHANGELOG.md b/CHANGELOG.md index ba65c0b..e45abd0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,40 +1,48 @@ -# 0.4.5 +## 0.5.0 + +- Update licence bearer and links. +- Bump dependencies. +- Bump the SDK version to `3.0.0`. +- BREAKING: Remove the `sprint` dependency, which is inappropriate for a package of this + kind. Replaced logging with documented exceptions. + +## 0.4.5 - Relicensed from GPLv3 to MIT. -# 0.4.4 +## 0.4.4 - Update SDK version from `2.12.0` to `2.17.0`. - Updated project description to make it more accurate in describing what `synadart` actually is. -# 0.4.3 +## 0.4.3 - Bumped version of `sprint` from `1.0.2+3` to `1.0.3`. - Updated repository, homepage and issue tracker links. - Refactored and made formatting and style changes to bring the project up to par. -# 0.4.2+1 +## 0.4.2+1 - Updated package description. -# 0.4.2 +## 0.4.2 - Updated `sprint` version from `1.0.0+1` to `1.0.2+3`. - Replaced the now discontinued `pedantic` with the `words` lint ruleset. - Reversed the order of versions in `CHANGELOG.md` from ascending to descending. -# 0.4.1+1 +## 0.4.1+1 - Refactored code. - Removed `logger.dart` in favour of the `sprint` package. -# 0.4.1 +## 0.4.1 - Updated documentation. -# 0.4.0 +## 0.4.0 - Organised code. - Replaced network types such as `feed-forward` or `deep feed-forward` with a @@ -44,44 +52,44 @@ user to a preset model. - Updated `example.dart` and `README.md`. -# 0.3.2 +## 0.3.2 - Added a simple feed-forward network model. -# 0.3.1 +## 0.3.1 - Added 5 new activation functions: `SeLU`, `Softplus`, `Softsign`, `Swish` and `Gaussian`. - Renamed the 'logistic' function to 'sigmoid'. - Created function `abs()` for obtaining the absolute value of a variable. -# 0.3.0 +## 0.3.0 - Updated documentation of `Logger`, `Backpropagation` and `ValueGenerator`. - Created `/examples` directory with a file `example.dart` that displays the network being used to recognise the number '5'. 
-# Version 0.2.5 +## 0.2.5 - Renamed 'Multilayer Perceptron' to 'Deep Feed-Forward', since 'deep feed-forward' is broader as a concept than 'multi-layer perceptrons'. -# Version 0.2.4 +## 0.2.4 - Updated documentation of `activation.dart`, having added explanations for the different activation functions. -# Version 0.2.3 +## 0.2.3 - Updated documentation of `Network`. - Replaced `process()` in `Layer` with an `output` getter, simplifying the method of getting each `Neuron`'s output. -# Version 0.2.2 +## 0.2.2 - Updated documentation of `Layer` and removed a chunk of dead code. -# Version 0.2.1 +## 0.2.1 - Removed the feed-forward network and simple perceptrons in favour of an upcoming simpler implementation of networks, through the use of a single @@ -90,16 +98,16 @@ `0.2`. - Updated documentation of `Neuron`. -# Version 0.2.0 +## 0.2.0 - Added a feed-forward network and simple perceptrons. - Added `LReLU`, `eLU` and `tanh` activation functions. - Renamed 'sigmoid' to 'logistic'. -# Version 0.1.1 +## 0.1.1 - Added `README.md` and updated formatting. -# Version 0.1.0 +## 0.1.0 - Implemented a multilayer perceptron and a basic algorithm for backpropagation. 
diff --git a/LICENSE b/LICENSE index 072281e..922eb11 100644 --- a/LICENSE +++ b/LICENSE @@ -1,21 +1,20 @@ MIT License -Copyright (c) 2022 WordCollector +Copyright (c) 2023 Dorian "vxern" Oszczęda -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/example/example.dart b/example/example.dart index 4228de1..36304bf 100644 --- a/example/example.dart +++ b/example/example.dart @@ -16,7 +16,7 @@ void main() { Dense( size: 1, activation: ActivationAlgorithm.sigmoid, - ) + ), ], ); diff --git a/lib/src/layers/layer.dart b/lib/src/layers/layer.dart index a2c80de..e810e96 100644 --- a/lib/src/layers/layer.dart +++ b/lib/src/layers/layer.dart @@ -1,7 +1,3 @@ -import 'dart:io'; - -import 'package:sprint/sprint.dart'; - import 'package:synadart/src/activation.dart'; import 'package:synadart/src/neurons/neuron.dart'; import 'package:synadart/src/utils/mathematical_operations.dart'; @@ -10,9 +6,6 @@ import 'package:synadart/src/utils/mathematical_operations.dart'; /// 'column' of `Neurons` that can be manipulated through accepting new data and /// trained. class Layer { - /// `Sprint` instance for logging messages. - final Sprint log = Sprint('Layer'); - /// The algorithm used for activating `Neurons`. final ActivationAlgorithm activation; @@ -33,13 +26,14 @@ class Layer { /// /// [activation] - The algorithm used for determining how active `Neurons` are /// contained within this layer. + /// + /// ⚠️ Throws a [FormatException] if the size of the layer is less than 1. 
Layer({ required this.size, required this.activation, }) { if (size < 1) { - log.severe('A layer must contain at least one neuron.'); - exit(0); + throw const FormatException('A layer must contain at least one neuron.'); } } diff --git a/lib/src/networks/network.dart b/lib/src/networks/network.dart index 92aa474..8f5a64d 100644 --- a/lib/src/networks/network.dart +++ b/lib/src/networks/network.dart @@ -1,5 +1,3 @@ -import 'package:sprint/sprint.dart'; - import 'package:synadart/src/layers/layer.dart'; /// Representation of a neural network containing `Layers`, which each further @@ -13,9 +11,6 @@ class Network { /// Used for performance analysis as well as general information logging. final Stopwatch stopwatch = Stopwatch(); - /// `Sprint` instance for logging messages. - final Sprint log = Sprint('Network'); - /// The `Layers` part of this `Network`. final List layers = []; @@ -56,8 +51,6 @@ class Network { ); layers.add(layer); - - log.info('Added layer of size ${layer.neurons.length}.'); } /// Adds a list of `Layers` to this `Network`. @@ -69,15 +62,14 @@ class Network { /// Clears the `Network` by removing all `Layers`, thereby returning it to its /// initial, empty state. + /// + /// ⚠️ Throws a [StateError] if the network has already been cleared. 
void clear() { if (layers.isEmpty) { - log.warning('Attempted to reset an already empty network.'); - return; + throw StateError('Attempted to reset an already empty network.'); } stopwatch.reset(); layers.clear(); - - log.success('Network reset successfully.'); } } diff --git a/lib/src/networks/training/backpropagation.dart b/lib/src/networks/training/backpropagation.dart index 24f3317..c3422f9 100644 --- a/lib/src/networks/training/backpropagation.dart +++ b/lib/src/networks/training/backpropagation.dart @@ -1,8 +1,5 @@ -import 'dart:io'; - import 'package:synadart/src/networks/network.dart'; import 'package:synadart/src/utils/mathematical_operations.dart'; -import 'package:synadart/src/utils/utils.dart'; /// Extension to `Network` that allows it to train by performing /// backpropagation. @@ -29,58 +26,44 @@ mixin Backpropagation on Network { /// /// [iterations] - How many times the `Network` should perform backpropagation /// using the provided inputs and expected values. + /// + /// ⚠️ Throws a [FormatException] if: + /// - The [inputs] and [expected] vectors are empty. + /// - The [inputs] and [expected] vectors are of different sizes. + /// - The number of iterations is less than 1. 
void train({ required List> inputs, required List> expected, required int iterations, + @Deprecated( + 'The package no longer logs messages, thus the quiet mode no longer ' + 'serves a purpose.', + ) bool quiet = false, }) { if (inputs.isEmpty || expected.isEmpty) { - log.severe('Both inputs and expected results must not be empty.'); - exit(0); + throw const FormatException( + 'Both inputs and expected results must not be empty.', + ); } if (inputs.length != expected.length) { - log.severe( + throw const FormatException( 'Inputs and expected result lists must be of the same length.', ); - return; } if (iterations < 1) { - log.severe( + throw const FormatException( 'You cannot train a network without granting it at least one ' 'iteration.', ); - return; - } - - // Perform backpropagation without any additional metrics overhead - if (quiet) { - for (var iteration = 0; iteration < iterations; iteration++) { - for (var index = 0; index < inputs.length; index++) { - propagateBackwards(inputs[index], expected[index]); - } - } - return; } for (var iteration = 0; iteration < iterations; iteration++) { - stopwatch.start(); - for (var index = 0; index < inputs.length; index++) { propagateBackwards(inputs[index], expected[index]); } - - stopwatch.stop(); - - if (iteration % 500 == 0) { - log.info( - 'Iterations: $iteration/$iterations ~ ETA: ${secondsToETA((stopwatch.elapsedMicroseconds * (iterations - iteration)) ~/ 1000000)}', - ); - } - - stopwatch.reset(); } } } diff --git a/lib/src/neurons/neuron.dart b/lib/src/neurons/neuron.dart index c2720c2..8750876 100644 --- a/lib/src/neurons/neuron.dart +++ b/lib/src/neurons/neuron.dart @@ -1,8 +1,5 @@ -import 'dart:io'; import 'dart:math'; -import 'package:sprint/sprint.dart'; - import 'package:synadart/src/activation.dart'; import 'package:synadart/src/utils/mathematical_operations.dart'; import 'package:synadart/src/utils/value_generator.dart'; @@ -12,9 +9,6 @@ import 'package:synadart/src/utils/value_generator.dart'; /// basic 
of which being the taking of the weighted sum of [inputs] and /// [weights], and passing it on to the next `Neuron`. class Neuron { - /// `Sprint` instance for logging messages. - final Sprint log = Sprint('Neuron'); - /// The activation algorithm used for determining this `Neuron`'s level of /// activation. late final ActivationFunction activation; @@ -60,6 +54,10 @@ class Neuron { /// [weights] - (Optional) Weights of connections to `Neuron`s in the previous /// `Layer`. If the [weights] aren't provided, they will be generated /// randomly. + /// + /// ⚠️ Throws a [FormatException] if the number of weights supplied to this + /// neuron does not match the number of connections to neurons in the parent + /// layer. Neuron({ required ActivationAlgorithm activationAlgorithm, required int parentLayerSize, @@ -90,11 +88,10 @@ class Neuron { } if (weights.length != parentLayerSize) { - log.severe( + throw const FormatException( 'The number of weights supplied to this neuron does not match the ' 'number of connections to neurons in the parent layer.', ); - exit(0); } // ignore: prefer_initializing_formals @@ -104,10 +101,11 @@ class Neuron { /// Accepts a single [input] or multiple [inputs] by assigning them to the /// [inputs] of this `Neuron`. + /// + /// ⚠️ Throws a [FormatException] if both [inputs] and [input] are `null`. void accept({List? inputs, double? input}) { if (inputs == null && input == null) { - log.severe('Attempted to accept without any inputs.'); - exit(0); + throw const FormatException('Attempted to accept without any inputs.'); } if (!isInput && inputs != null) { diff --git a/pubspec.yaml b/pubspec.yaml index e41f0b8..a36b9d3 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -1,20 +1,16 @@ name: synadart -version: 0.4.5 +version: 0.5.0 description: >- A limited but fully documented neural network library created for educational purposes. 
-homepage: https://github.com/wordcollector/synadart -repository: https://github.com/wordcollector/synadart -issue_tracker: https://github.com/wordcollector/synadart/issues +homepage: https://github.com/vxern/synadart +repository: https://github.com/vxern/synadart +issue_tracker: https://github.com/vxern/synadart/issues environment: - sdk: '>=2.17.0 <3.0.0' - -dependencies: - # Logging - sprint: ^1.0.4 + sdk: ">=3.0.0 <4.0.0" dev_dependencies: - words: ^0.1.1 + words: ^0.4.4