From 89db1b28198dafe53920487251111f2c4ec05693 Mon Sep 17 00:00:00 2001
From: KEMBL
Date: Thu, 3 Sep 2020 06:10:48 +0200
Subject: [PATCH] feat: activation function can be assigned to each layer
 fix: fixed output of logs according to log level

---
 src/index.ts                                | 30 ++++++++++++------
 src/neuron/Layer.ts                         | 17 ++++++++--
 src/neuron/Network.ts                       | 16 ++++++----
 src/neuron/Neuron.ts                        | 18 +++++++++--
 src/neuron/README.md                        |  2 +-
 src/neuron/configuration/Configuration.ts   |  6 ++--
 src/neuron/configuration/SharedFunctions.ts | 35 +++++++++++++++------
 src/neuron/models/index.ts                  |  1 +
 src/neuron/models/types.ts                  |  8 +++++
 src/services/Logger.service.ts              |  2 +-
 10 files changed, 101 insertions(+), 34 deletions(-)
 create mode 100644 src/neuron/models/index.ts
 create mode 100644 src/neuron/models/types.ts

diff --git a/src/index.ts b/src/index.ts
index eb0d785..0e1bcfb 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -1,22 +1,29 @@
+import { LayerConfig } from './neuron/models';
 import { Configuration, Network } from './neuron';
 import { Log } from './services';
 import { Verbosity } from './models';
 
 class Program {
   constructor() {
+    const started = new Date();
     Log.log('Programm started');
-    Log.verbosity = Verbosity.Info;
+    Log.verbosity = Verbosity.Warning;
 
     Configuration.bias = 1;
-    Configuration.activationType = 'Sigmoid';
-    Configuration.useCostFunction = 'Identity';
+    Configuration.activationType = 'ReLU'; // default
+    Configuration.useCostFunction = 'Default';
 
+    // Regression
     const networkInputs = [1, 0];
     const targetOutputs = [1];
     const maximumCostError = 0.0001;
-    const maxLearningSteps = 1;
+    const maxLearningSteps = 10000;
     const learningDelta = 0.1;
-    const layers = [2, 2, 1];
+    const layersConfig: LayerConfig[] = [
+      { neurons: 2 },
+      { neurons: 2 },
+      { neurons: 1, activationType: 'Sigmoid' }
+    ];
 
     // Fill in arrays if want to start not from random weights
     // Neurons: XYZ X - source output, Y - layer row Z - input Layer
@@ -45,14 +52,14 @@
     // ];
 
     const network = new Network(
-      layers[0],
+      layersConfig[0].neurons,
       maxLearningSteps,
       maximumCostError,
       learningDelta
     ); // error, ldelta, maxSteps
 
-    for (const neuronsCount of layers) {
-      network.addLayer(neuronsCount); // make neurons / init them / etc
+    for (const layerConfig of layersConfig) {
+      network.addLayer(layerConfig); // make neurons / init them / etc
     }
 
     if (weights.length > 0) {
@@ -64,8 +71,13 @@
     const result = network.output();
     Log.log('Programm finished', result, targetOutputs);
     Log.log('Result weights', network.getWeights());
-    Log.log('Last step', Network.currentStep + 1);
+    Log.log('Last step', Network.currentStep);
     Log.log('Error cost', network.findStepError(targetOutputs));
+    Log.log(
+      `Finished in`,
+      new Date().getSeconds() - started.getSeconds(),
+      'seconds'
+    );
   }
 }
 
diff --git a/src/neuron/Layer.ts b/src/neuron/Layer.ts
index eefbc55..d4e4a5f 100644
--- a/src/neuron/Layer.ts
+++ b/src/neuron/Layer.ts
@@ -1,4 +1,5 @@
 import { Log } from '../services';
+import { ActivationType } from './models';
 import { Neuron } from './';
 
 // shortcut to rounding function
@@ -10,18 +11,30 @@ import { Neuron } from './';
  */
 export class Layer {
   private moduleName = '';
+  private activationType: ActivationType = 'ReLU';
 
   public neurons: Neuron[] = [];
 
-  constructor(public layerId: number, private neuronsAmount: number) {
+  constructor(
+    public layerId: number,
+    private neuronsAmount: number,
+    activationType?: ActivationType
+  ) {
+    if (activationType) {
+      this.activationType = activationType;
+    }
     this.init();
   }
 
   private init = (): void => {
     this.neurons = [];
     this.moduleName = `Lr ${this.layerId}`;
+    Log.debug(
+      `Config: neuronsAmount ${this.neuronsAmount}, activation: ${this.activationType}`,
+      this.moduleName
+    );
 
     for (let i = 0; i < this.neuronsAmount; i++) {
       const neuronId = i + 1;
-      const neuron = new Neuron(this.layerId, neuronId);
+      const neuron = new Neuron(this.layerId, neuronId, this.activationType);
       this.neurons.push(neuron);
     }
   };
diff --git a/src/neuron/Network.ts b/src/neuron/Network.ts
index 87b75f3..26be0a5 100644
--- a/src/neuron/Network.ts
+++ b/src/neuron/Network.ts
@@ -1,5 +1,6 @@
 import { Log } from '../services';
-import { Layer, StringFunctions } from './';
+import { Layer, StringFunctions, Configuration } from './';
+import { LayerConfig } from './models';
 
 // shortcut to rounding function
 // eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
@@ -36,24 +37,27 @@ export class Network {
     this.maxError = maxCostError;
     this.ldelta = ldelta;
 
-    this.addLayer(inputs); // inuts
+    this.addLayer({ neurons: inputs }); // inputs
   }
 
   /** Adds new layer */
-  addLayer = (neuronsCount: number): void => {
+  public addLayer = (config: LayerConfig): void => {
+    const selectedFunction = config.activationType
+      ? config.activationType
+      : Configuration.activationType;
     const layerId = this.layers.length;
-    const layer = new Layer(layerId, neuronsCount);
+    const layer = new Layer(layerId, config.neurons, selectedFunction);
     this.layers.push(layer);
     this.lastLayer = layer;
   };
 
   /** Returns output of the last layer */
-  output = (): number[] => {
+  public output = (): number[] => {
     return this.lastLayer.output();
   };
 
   /** Makes learning cycles */
-  learn = (inputArray: number[], outputArray: number[]): void => {
+  public learn = (inputArray: number[], outputArray: number[]): void => {
     this.layers[0].setOutput(inputArray);
     for (let i = 1; i <= this.maxSteps; i++) {
       Network.currentStep = i;
diff --git a/src/neuron/Neuron.ts b/src/neuron/Neuron.ts
index c41a8f0..8ccf714 100644
--- a/src/neuron/Neuron.ts
+++ b/src/neuron/Neuron.ts
@@ -1,6 +1,7 @@
 import { Log } from '../services';
 import { Configuration, SharedFunctions } from './configuration';
 import { StringFunctions } from '.';
+import { ActivationType } from './models';
 
 // shortcut to rounding function
 // eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
@@ -29,17 +30,25 @@ export class Neuron {
   }
 
   private set input(value: number) {
-    this.activatedValue = SharedFunctions.activationFunction(value);
+    this.activatedValue = SharedFunctions.activationFunction(
+      value,
+      this.activationType
+    );
     Log.debug(
       `Out: act(${fnz(value)}) -> ${fnz(this.activatedValue)}`,
       this.moduleName
     );
   }
 
-  constructor(layerId: number, neuronId: number) {
+  constructor(
+    layerId: number,
+    neuronId: number,
+    private activationType: ActivationType
+  ) {
     this.neuronId = neuronId;
     this.layerId = layerId;
     this.moduleName = `Nr ${neuronId}${layerId}`;
+    Log.debug(`Config: activation: ${this.activationType}`, this.moduleName);
   }
 
   public cost(expected: number): number {
@@ -97,7 +106,10 @@ export class Neuron {
   public correctWeights = (learningDelta: number): void => {
     const weightAdditionMultiplayer =
       this.propagationError *
-      SharedFunctions.activationFunctionPrime(this.output) *
+      SharedFunctions.activationFunctionPrime(
+        this.output,
+        this.activationType
+      ) *
       learningDelta;
 
     for (let i = 0; i < this.weights.length; i++) {
diff --git a/src/neuron/README.md b/src/neuron/README.md
index 46b94da..dd7ca8a 100644
--- a/src/neuron/README.md
+++ b/src/neuron/README.md
@@ -2,7 +2,7 @@
 
 Business logic of the neural network
 
-Feedforward neural network with backpropagation
+Feedforward neural network with backpropagation, regression
 
 - More: http://galaxy.agh.edu.pl/~vlsi/AI/backp_t_en/backprop.html
 
diff --git a/src/neuron/configuration/Configuration.ts b/src/neuron/configuration/Configuration.ts
index 089e617..d07c3d0 100644
--- a/src/neuron/configuration/Configuration.ts
+++ b/src/neuron/configuration/Configuration.ts
@@ -1,10 +1,12 @@
+import { ActivationType, CostType } from '../models';
+
 /**
  * Main settings fo the ML system
  */
 export class Configuration {
   public static bias = 1;
   /** Activation function */
-  public static activationType = 'Identity';
+  public static activationType: ActivationType = 'ReLU';
   /** Cost function */
-  public static useCostFunction = 'Squared';
+  public static useCostFunction: CostType = 'Squared';
 }
diff --git a/src/neuron/configuration/SharedFunctions.ts b/src/neuron/configuration/SharedFunctions.ts
index 6a249b9..4bbabb9 100644
--- a/src/neuron/configuration/SharedFunctions.ts
+++ b/src/neuron/configuration/SharedFunctions.ts
@@ -1,5 +1,11 @@
+import { ActivationType } from '../models';
 import { Configuration } from './Configuration';
 
+export interface ActivationFunction {
+  (x: number): number;
+  (x: number, activationType: ActivationType): number;
+}
+
 /**
  * Shared functions
  */
@@ -16,12 +22,9 @@
         return Math.random(); // [0, 1]
       case 'Sigmoid':
         return Math.random() - 1; // [-0.5, 0.5]
-      default: {
-        console.warn(
-          `Define initial weight function for ${Configuration.activationType} actiovation type`
-        );
+      default:
+        // Identity
         return Math.random(); // [0, 1]
-      }
     }
   };
@@ -43,13 +46,19 @@
       }
       // case 'CrossEntropy': // TODO: needs for classification
       default:
-        // Identity
+        // Default
         return expected - prediction;
     }
   };
 
-  public static activationFunction = (x: number): number => {
-    switch (Configuration.activationType) {
+  public static activationFunction: ActivationFunction = (
+    x: number,
+    activationType?: ActivationType
+  ): number => {
+    const selectedFunction = activationType
+      ? activationType
+      : Configuration.activationType;
+    switch (selectedFunction) {
       case 'ReLU':
         return SharedFunctions.activationFunctionReLU(x);
       case 'LeakyReLU':
@@ -62,8 +71,14 @@
-  public static activationFunctionPrime = (x: number): number => {
-    switch (Configuration.activationType) {
+  public static activationFunctionPrime: ActivationFunction = (
+    x: number,
+    activationType?: ActivationType
+  ): number => {
+    const selectedFunction = activationType
+      ? activationType
+      : Configuration.activationType;
+    switch (selectedFunction) {
       case 'ReLU':
         return SharedFunctions.activationFunctionReLUPrime(x);
       case 'LeakyReLU':
diff --git a/src/neuron/models/index.ts b/src/neuron/models/index.ts
new file mode 100644
index 0000000..fcb073f
--- /dev/null
+++ b/src/neuron/models/index.ts
@@ -0,0 +1 @@
+export * from './types';
diff --git a/src/neuron/models/types.ts b/src/neuron/models/types.ts
new file mode 100644
index 0000000..1bd980e
--- /dev/null
+++ b/src/neuron/models/types.ts
@@ -0,0 +1,8 @@
+export interface LayerConfig {
+  neurons: number;
+  activationType?: ActivationType;
+}
+
+export type ActivationType = 'ReLU' | 'LeakyReLU' | 'Sigmoid' | 'Identity';
+
+export type CostType = 'Squared' | 'Default';
diff --git a/src/services/Logger.service.ts b/src/services/Logger.service.ts
index a673eef..5f57d6a 100644
--- a/src/services/Logger.service.ts
+++ b/src/services/Logger.service.ts
@@ -54,7 +54,7 @@
     sourceName?: string,
     ...args: unknown[]
   ): void => {
-    if (verbosity < Log._globalVerbosity) {
+    if (verbosity > Log._globalVerbosity) {
       return;
     }
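
For reviewers trying the branch out, here is a minimal usage sketch of the per-layer activation API this patch introduces. It only uses calls that appear in the diff (`Network`, `addLayer`, `learn`, `output`, `LayerConfig`); the layer sizes, step limit, and cost threshold are the illustrative values from `src/index.ts`, and `console.log` stands in for the project's `Log` helper:

```ts
import { Configuration, Network } from './neuron';
import { LayerConfig } from './neuron/models';

// Network-wide default; any layer that omits activationType falls back to it.
Configuration.activationType = 'ReLU';

const layersConfig: LayerConfig[] = [
  { neurons: 2 },                           // input layer
  { neurons: 2 },                           // hidden layer, inherits the ReLU default
  { neurons: 1, activationType: 'Sigmoid' } // output layer overrides per layer
];

// Constructor arguments: input neurons, max learning steps, max cost error, learning delta.
const network = new Network(layersConfig[0].neurons, 10000, 0.0001, 0.1);
for (const layerConfig of layersConfig) {
  network.addLayer(layerConfig); // each layer resolves its own activation function
}

network.learn([1, 0], [1]); // single regression sample, as in src/index.ts
console.log(network.output());
```

The resolution order is the one implemented in `Network.addLayer` and `SharedFunctions.activationFunction`: an explicit `LayerConfig.activationType` wins, otherwise the global `Configuration.activationType` applies.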
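The second half of the commit corrects the verbosity guard in `Logger.service.ts`: with the old `<` comparison, raising the global threshold suppressed severe messages and let chatty ones through. Below is a self-contained model of the corrected behavior. The enum ordering (Error < Warning < Info < Debug) is an assumption, since only `Info` and `Warning` appear in this patch, and `shouldPrint` is a hypothetical stand-in for the guard inside `Log`:

```ts
// Hypothetical stand-in for the Verbosity enum in src/models,
// assumed to be ordered from most severe to most verbose.
enum Verbosity {
  Error,   // 0
  Warning, // 1
  Info,    // 2
  Debug    // 3
}

const globalVerbosity = Verbosity.Warning; // threshold set in src/index.ts

// Mirrors the corrected guard: drop anything chattier than the threshold.
function shouldPrint(messageVerbosity: Verbosity): boolean {
  return !(messageVerbosity > globalVerbosity);
}

console.log(shouldPrint(Verbosity.Error));   // true  - severe messages always pass
console.log(shouldPrint(Verbosity.Warning)); // true  - exactly at the threshold
console.log(shouldPrint(Verbosity.Debug));   // false - filtered out, as intended
```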