Skip to content

Commit

Permalink
feat: activation function can be assigned to each layer
Browse files Browse the repository at this point in the history
fix: fixed output of logs according to log level
  • Loading branch information
KEMBL committed Sep 3, 2020
1 parent 43e4184 commit 89db1b2
Show file tree
Hide file tree
Showing 10 changed files with 101 additions and 34 deletions.
30 changes: 21 additions & 9 deletions src/index.ts
Original file line number Diff line number Diff line change
@@ -1,22 +1,29 @@
import { LayerConfig } from './neuron/models';
import { Configuration, Network } from './neuron';
import { Log } from './services';
import { Verbosity } from './models';

class Program {
constructor() {
const started = new Date();
Log.log('Programm started');

Log.verbosity = Verbosity.Info;
Log.verbosity = Verbosity.Warning;
Configuration.bias = 1;
Configuration.activationType = 'Sigmoid';
Configuration.useCostFunction = 'Identity';
Configuration.activationType = 'ReLU'; // default
Configuration.useCostFunction = 'Default';

// Regression
const networkInputs = [1, 0];
const targetOutputs = [1];
const maximumCostError = 0.0001;
const maxLearningSteps = 1;
const maxLearningSteps = 10000;
const learningDelta = 0.1;
const layers = [2, 2, 1];
const layersConfig: LayerConfig[] = [
{ neurons: 2 },
{ neurons: 2 },
{ neurons: 1, activationType: 'Sigmoid' }
];

// Fill in the arrays below if you want to start from specific (non-random) weights
// Neuron naming: XYZ — X: source output, Y: layer row, Z: input layer
Expand Down Expand Up @@ -45,14 +52,14 @@ class Program {
// ];

const network = new Network(
layers[0],
layersConfig[0].neurons,
maxLearningSteps,
maximumCostError,
learningDelta
); // error, ldelta, maxSteps

for (const neuronsCount of layers) {
network.addLayer(neuronsCount); // make neurons / init them / etc
for (const layerConfig of layersConfig) {
network.addLayer(layerConfig); // make neurons / init them / etc
}

if (weights.length > 0) {
Expand All @@ -64,8 +71,13 @@ class Program {
const result = network.output();
Log.log('Programm finished', result, targetOutputs);
Log.log('Result weights', network.getWeights());
Log.log('Last step', Network.currentStep + 1);
Log.log('Last step', Network.currentStep);
Log.log('Error cost', network.findStepError(targetOutputs));
Log.log(
`Finished in`,
new Date().getSeconds() - started.getSeconds(),
'seconds'
);
}
}

Expand Down
17 changes: 15 additions & 2 deletions src/neuron/Layer.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import { Log } from '../services';
import { ActivationType } from './models';
import { Neuron } from './';

// shortcut to rounding function
Expand All @@ -10,18 +11,30 @@ import { Neuron } from './';
*/
export class Layer {
private moduleName = '';
private activationType: ActivationType = 'ReLU';
public neurons: Neuron[] = [];

constructor(public layerId: number, private neuronsAmount: number) {
constructor(
public layerId: number,
private neuronsAmount: number,
activationType?: ActivationType
) {
if (activationType) {
this.activationType = activationType;
}
this.init();
}

private init = (): void => {
this.neurons = [];
this.moduleName = `Lr ${this.layerId}`;
Log.debug(
`Config: neuronsAmount ${this.neuronsAmount}, activation: ${this.activationType}`,
this.moduleName
);
for (let i = 0; i < this.neuronsAmount; i++) {
const neuronId = i + 1;
const neuron = new Neuron(this.layerId, neuronId);
const neuron = new Neuron(this.layerId, neuronId, this.activationType);
this.neurons.push(neuron);
}
};
Expand Down
16 changes: 10 additions & 6 deletions src/neuron/Network.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import { Log } from '../services';
import { Layer, StringFunctions } from './';
import { Layer, StringFunctions, Configuration } from './';
import { LayerConfig } from './models';

// shortcut to rounding function
// eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
Expand Down Expand Up @@ -36,24 +37,27 @@ export class Network {
this.maxError = maxCostError;
this.ldelta = ldelta;

this.addLayer(inputs); // inuts
this.addLayer({ neurons: inputs }); // inuts
}

/** Adds new layer */
addLayer = (neuronsCount: number): void => {
public addLayer = (config: LayerConfig): void => {
const selectedFunction = config.activationType
? config.activationType
: Configuration.activationType;
const layerId = this.layers.length;
const layer = new Layer(layerId, neuronsCount);
const layer = new Layer(layerId, config.neurons, selectedFunction);
this.layers.push(layer);
this.lastLayer = layer;
};

/** Returns output of the last layer */
output = (): number[] => {
public output = (): number[] => {
return this.lastLayer.output();
};

/** Makes learning cycles */
learn = (inputArray: number[], outputArray: number[]): void => {
public learn = (inputArray: number[], outputArray: number[]): void => {
this.layers[0].setOutput(inputArray);
for (let i = 1; i <= this.maxSteps; i++) {
Network.currentStep = i;
Expand Down
18 changes: 15 additions & 3 deletions src/neuron/Neuron.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import { Log } from '../services';
import { Configuration, SharedFunctions } from './configuration';
import { StringFunctions } from '.';
import { ActivationType } from './models';

// shortcut to rounding function
// eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
Expand Down Expand Up @@ -29,17 +30,25 @@ export class Neuron {
}

private set input(value: number) {
this.activatedValue = SharedFunctions.activationFunction(value);
this.activatedValue = SharedFunctions.activationFunction(
value,
this.activationType
);
Log.debug(
`Out: act(${fnz(value)}) -> ${fnz(this.activatedValue)}`,
this.moduleName
);
}

constructor(layerId: number, neuronId: number) {
constructor(
layerId: number,
neuronId: number,
private activationType: ActivationType
) {
this.neuronId = neuronId;
this.layerId = layerId;
this.moduleName = `Nr ${neuronId}${layerId}`;
Log.debug(`Config: activation: ${this.activationType}`, this.moduleName);
}

public cost(expected: number): number {
Expand Down Expand Up @@ -97,7 +106,10 @@ export class Neuron {
public correctWeights = (learningDelta: number): void => {
const weightAdditionMultiplayer =
this.propagationError *
SharedFunctions.activationFunctionPrime(this.output) *
SharedFunctions.activationFunctionPrime(
this.output,
this.activationType
) *
learningDelta;

for (let i = 0; i < this.weights.length; i++) {
Expand Down
2 changes: 1 addition & 1 deletion src/neuron/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

Business logic of the neural network

Feedforward neural network with backpropagation
Feedforward neural network with backpropagation, regression

- More: http://galaxy.agh.edu.pl/~vlsi/AI/backp_t_en/backprop.html

Expand Down
6 changes: 4 additions & 2 deletions src/neuron/configuration/Configuration.ts
Original file line number Diff line number Diff line change
@@ -1,10 +1,12 @@
import { ActivationType, CostType } from '../models';

/**
* Main settings for the ML system
*/
export class Configuration {
public static bias = 1;
/** Activation function */
public static activationType = 'Identity';
public static activationType: ActivationType = 'ReLU';
/** Cost function */
public static useCostFunction = 'Squared';
public static useCostFunction: CostType = 'Squared';
}
35 changes: 25 additions & 10 deletions src/neuron/configuration/SharedFunctions.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,11 @@
import { ActivationType } from '../models';
import { Configuration } from './Configuration';

export interface ActivationFunction {
(x: number): number;
(x: number, activationType: ActivationType): number;
}

/**
* Shared functions
*/
Expand All @@ -16,12 +22,9 @@ export class SharedFunctions {
return Math.random(); // [0, 1]
case 'Sigmoid':
return Math.random() - 1; // [-0.5, 0.5]
default: {
console.warn(
`Define initial weight function for ${Configuration.activationType} actiovation type`
);
default:
//Identity
return Math.random(); // [0, 1]
}
}
};

Expand All @@ -43,13 +46,19 @@ export class SharedFunctions {
}
// case 'CrossEntropy': // TODO: needs for classification
default:
// Identity
// Default
return expected - prediction;
}
};

public static activationFunction = (x: number): number => {
switch (Configuration.activationType) {
public static activationFunction: ActivationFunction = (
x: number,
activationType?: ActivationType
): number => {
const selectedFunction = activationType
? activationType
: Configuration.activationType;
switch (selectedFunction) {
case 'ReLU':
return SharedFunctions.activationFunctionReLU(x);
case 'LeakyReLU':
Expand All @@ -62,8 +71,14 @@ export class SharedFunctions {
}
};

public static activationFunctionPrime = (x: number): number => {
switch (Configuration.activationType) {
public static activationFunctionPrime: ActivationFunction = (
x: number,
activationType?: ActivationType
): number => {
const selectedFunction = activationType
? activationType
: Configuration.activationType;
switch (selectedFunction) {
case 'ReLU':
return SharedFunctions.activationFunctionReLUPrime(x);
case 'LeakyReLU':
Expand Down
1 change: 1 addition & 0 deletions src/neuron/models/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
export * from './types';
8 changes: 8 additions & 0 deletions src/neuron/models/types.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
export interface LayerConfig {
neurons: number;
activationType?: ActivationType;
}

export type ActivationType = 'ReLU' | 'LeakyReLU' | 'Sigmoid' | 'Identity';

export type CostType = 'Squared' | 'Default';
2 changes: 1 addition & 1 deletion src/services/Logger.service.ts
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ export class Log {
sourceName?: string,
...args: unknown[]
): void => {
if (verbosity < Log._globalVerbosity) {
if (verbosity > Log._globalVerbosity) {
return;
}

Expand Down

0 comments on commit 89db1b2

Please sign in to comment.