Merge branch 'develop'
KEMBL committed Aug 31, 2020
2 parents a29a4da + ca2cbdd commit 8d6c1a9
Showing 18 changed files with 1,241 additions and 435 deletions.
1 change: 1 addition & 0 deletions .prettierignore
@@ -0,0 +1 @@
src/**/*.d.ts
3 changes: 1 addition & 2 deletions .prettierrc.js
@@ -1,8 +1,7 @@
 module.exports = {
   printWidth: 180,
-  bracketSpacing: false,
   jsxBracketSameLine: true,
   singleQuote: true,
-  trailingComma: false,
+  trailingComma: 'none',
   bracketSpacing: true
 };
6 changes: 6 additions & 0 deletions README.md
@@ -1,3 +1,9 @@
# machine-learning

A set of code for testing machine-learning-related ideas in a Node.js environment

Feedforward neural network with backpropagation

- More: http://galaxy.agh.edu.pl/~vlsi/AI/backp_t_en/backprop.html

- More: https://www.youtube.com/watch?v=t-Jpm1axBko
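
For orientation only, a minimal sketch that is not code from this repository (all names below are illustrative): one sigmoid neuron's forward pass and the error term that backpropagation uses.

const sigmoid = (x: number): number => 1 / (1 + Math.exp(-x));

const weights = [0.13, -0.42]; // one weight per input
const bias = 1;
const inputs = [1, 0];
const target = 1;

// forward pass: weighted sum of the inputs plus bias, squashed by the activation
const z = inputs.reduce((sum, x, i) => sum + x * weights[i], bias);
const out = sigmoid(z);

// backward pass: the delta scales the output error by the activation slope,
// using sigmoid'(z) = out * (1 - out)
const delta = (target - out) * out * (1 - out);
console.log(out, delta);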
101 changes: 101 additions & 0 deletions docs/9999.steps.error.txt
@@ -0,0 +1,101 @@




Cannot solve network in 9999 steps
Interesting bug: the network passed propagation one time in the forward direction and the next time in the back direction.

With the configuration:

Configuration.bias = 1;
Configuration.activationType = 'Sigmoid';
Configuration.useCostFunction = 'Identity';
const inputs = [1, 0];
const targetOutputs = [1];

const error = 0.0001;
const maxSteps = 10000;
const ldelta = 0.1;
const debug = true;
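// network shape: 2 input neurons, 2 hidden neurons, 1 output neuron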
const layers = [2, 2, 1];

// Weight naming: wXYZ, where X - source (input) index, Y - neuron row, Z - layer
// Debug: prefilled weights for reproducing this run
const weights = [
[
[0.13, -0.42], // w111, w211
[-0.34, 0.38] // w121, w221
],
[
[0.25, -0.2], // w112, w212
[0.07, 0.32] // w122, w222
],
[[-0.41, 0.12]] // w113, w213
];
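// Index mapping: wXYZ = weights[Z-1][Y-1][X-1], e.g. w213 = weights[2][0][1] = 0.12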


OUT:

Nt : Learn step 9999
Nt : Propagation
Nr 12: Out: act(2.7451) -> 0.94
Nr 12: prediction 0.94
Nr 22: Out: act(3.9656) -> 0.98
Nr 22: prediction 0.98
Nr 11: Out: act(7.1340) -> 1.00
Nr 11: prediction 1.00
Nr 21: Out: act(7.8411) -> 1.00
Nr 21: prediction 1.00
Nr 10: Out: act(0.028) -> 0.51
Nr 10: prediction 0.51
Nr 20: Out: act(-0.19) -> 0.45
Nr 20: prediction 0.45
Nr 13: costf expec: 1.0000, act: 0.99
Lr 3: Lec: 0 0.0050
Lr 3: Lec: 0.0050
Nt : Cost error search 0.0050
Nt : Res1 0.0050 <=? 0.0001
Nt : Back propagation
Lr 0: CountErrors
Lr 1: CountErrors
Nr 10: weightError W110 = 0
Nr 20: weightError W120 = 0
Nr 10: weightError W210 = 0
Nr 20: weightError W220 = 0
Lr 1: PropagationError [ 0, 0 ]
Lr 2: CountErrors
Nr 11: weightError W111 = 0
Nr 21: weightError W121 = 0
Nr 11: weightError W211 = 0
Nr 21: weightError W221 = 0
Lr 2: PropagationError [ 0, 0 ]
Lr 3: CountErrors
Nr 12: weightError W112 = 0
Nr 22: weightError W122 = 0
Lr 3: PropagationError [ 0 ]
Step weights [
[
[ 3.6557519074579687, 2.750120828433372 ],
[ 3.3842892480045124, 3.730466980473381 ]
],
[
[ 1.7539241589012051, 1.3473803347581663 ],
[ 2.9805490674365664, 3.333285567708932 ]
],
[ [ 1.7626923588248318, 2.5608905458477635 ] ]
]
Programm finished [ 0.9949911111648337 ] [ 1 ]
Result weights [
[
[ 3.6557519074579687, 2.750120828433372 ],
[ 3.3842892480045124, 3.730466980473381 ]
],
[
[ 1.7539241589012051, 1.3473803347581663 ],
[ 2.9805490674365664, 3.333285567708932 ]
],
[ [ 1.7626923588248318, 2.5608905458477635 ] ]
]
Done in 194.45s.
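
A side note on the trace (an observation from the logged numbers, not a diagnosis stated in the commit): the hidden pre-activations act(7.1340) and act(7.8411) sit deep in the sigmoid's flat region, where the derivative sigmoid'(x) = sigmoid(x) * (1 - sigmoid(x)) is nearly zero, which is consistent with every printed weightError rounding to 0. A quick check:

const sigmoid = (x: number): number => 1 / (1 + Math.exp(-x));
const dSigmoid = (x: number): number => sigmoid(x) * (1 - sigmoid(x));

console.log(sigmoid(7.134), dSigmoid(7.134)); // ~0.9992, ~0.0008
console.log(sigmoid(7.8411), dSigmoid(7.8411)); // ~0.9996, ~0.0004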

8 changes: 5 additions & 3 deletions package.json
@@ -12,7 +12,9 @@
     "compile": "tsc --build --verbose",
     "purge": "yarn clean:all && yarn && yarn prepare:all",
     "lint": "eslint -c .eslintrc.js --ext .ts --ignore-pattern *.d.ts ./src",
-    "lint:fix": "eslint -c .eslintrc.js --ext .ts --ignore-pattern *.d.ts --fix ./src"
+    "lint:fix": "eslint -c .eslintrc.js --ext .ts --ignore-pattern *.d.ts --fix ./src",
+    "format": "prettier --write src/**/*.ts",
+    "pre-commit": "yarn lint:fix && yarn format"
   },
   "bugs": {
     "url": "https://github.com/KEMBL/machine-learning/issues"
@@ -44,7 +46,7 @@
     "eslint-plugin-prefer-arrow": "^1.2.1",
     "prettier": "^1.19.1",
     "prettier-eslint": "^9.0.1",
-    "ts-node": "^8.10.1",
-    "typescript": "^3.8.3"
+    "ts-node": "^9.0.0",
+    "typescript": "^4.0.2"
   }
 }
63 changes: 60 additions & 3 deletions src/index.ts
@@ -1,12 +1,69 @@
-import { Network } from './neuron/Network';
+import { Configuration, Network } from './neuron';
 
 class Program {
   constructor() {
     console.log('Programm started');
 
-    new Network().testNeuron();
+    Configuration.bias = 1;
+    Configuration.activationType = 'Sigmoid';
+    Configuration.useCostFunction = 'Identity';
+    const inputs = [1, 0];
+    const targetOutputs = [1];
 
-    console.log('Programm finished');
+    const maximumCostError = 0.0001;
+    const maxSteps = 1000;
+    const ldelta = 0.1;
+    const debug = true;
+    const layers = [2, 2, 1];
+
+    // Fill in this array to start from fixed weights instead of random ones
+    // Debug: prefilled weights for reproducing a run
+    // Weight naming: wXYZ, where X - source (input) index, Y - neuron row, Z - layer
+    // Structure: weights[layer][neuron][input weight]
+    const weights: number[][][] = [];
+    // const weights: number[][][] = [
+    //   [
+    //     [0.13, -0.42], // w111, w211
+    //     [-0.34, 0.38] // w121, w221
+    //   ],
+    //   [
+    //     [0.25, -0.2], // w112, w212
+    //     [0.07, 0.32] // w122, w222
+    //   ],
+    //   [[-0.41, 0.12]] // w113, w213
+    // ];
+
+    // const weights = [
+    //   [ [ 12.073027175758078, -0.42 ], [ 11.29143338568982, 0.38 ] ],
+    //   [
+    //     [ 2.5379574472175412, 2.060681210357274 ],
+    //     [ 4.114487335508431, 4.26457245636459 ]
+    //   ],
+    //   [ [ 2.11694803045532, 2.897016751994774 ] ]
+    // ];
+
+    const network = new Network(
+      layers[0],
+      maxSteps,
+      maximumCostError,
+      ldelta,
+      debug
+    );
+    for (const neuronsCount of layers) {
+      network.addLayer(neuronsCount); // create and initialize the layer's neurons
+    }
+
+    if (weights.length > 0) {
+      network.initWeights(weights);
+    }
+
+    network.learn(inputs, targetOutputs); // propagation / error cost / weights correction (back propagation)
+    // new Network().testNeuron();
+    const result = network.output();
+    console.log('Programm finished', result, targetOutputs);
+    console.log('Result weights', network.getWeights());
+    console.log('Last step', Network.currentStep + 1);
+    console.log('Error cost', network.findStepError(targetOutputs));
   }
 }
 
163 changes: 162 additions & 1 deletion src/neuron/Layer.ts
@@ -1 +1,162 @@
-export class Layer {}
+import { Neuron, StringFunctions } from './';

// shortcut to rounding function
// eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
// const _fnz = StringFunctions.fnz;

/**
* One layer of neurons
*/
export class Layer {
private debug = false;
private name = '';
public neurons: Neuron[] = [];

constructor(
public layerId: number,
private neuronsAmount: number,
debug?: boolean
) {
this.debug = !!debug;
this.init();
}

private init = (): void => {
this.neurons = [];
this.name = `Lr ${this.layerId}`;
for (let i = 0; i < this.neuronsAmount; i++) {
const neuronId = i + 1;
const neuron = new Neuron(this.layerId, neuronId, this.debug);
this.neurons.push(neuron);
}
};

/** Debug method. Allows modifying the neurons' weights directly */
public initWeights = (weights: number[][]): void => {
// this.log('Lw', weights);
for (let i = 0; i < this.neurons.length; i++) {
const neuron = this.neurons[i];
neuron.initWeights(weights[i]);
}
};

/** Debug method. Allows reading the neurons' weights directly */
public getWeights = (): number[][] => {
// this.log('GNe', weights);
const weights: number[][] = [];
for (let i = 0; i < this.neurons.length; i++) {
const neuron = this.neurons[i];
weights.push(neuron.getWeights());
}
return weights;
};

/**
* Sets the outputs of the first (input) layer directly from the input variables
* @param inputVariables
*/
public setOutput = (inputVariables: number[]): void => {
if (this.layerId !== 0) {
this.log(`WARN: Current layer ${this.layerId} is not an input layer!`);
}
for (let i = 0; i < this.neurons.length; i++) {
this.neurons[i].output = inputVariables[i];
}
};

/**
* Propagates the previous layer's outputs to all neurons of the current layer
* @param sourceLayer
*/
public propagate = (sourceLayer: Layer): void => {
// this.log(
// `Propagate layer ${this.layerId} from layer ${sourceLayer.layerId}`,
// this.neurons.length
// );
for (let i = 0; i < this.neurons.length; i++) {
this.propagateNeuron(this.neurons[i], sourceLayer);
this.neurons[i].prediction();
}
};

/**
* Takes a neuron of this layer and feeds it all incoming signals
* @param neuron
* @param sourceLayer
*/
private propagateNeuron = (neuron: Neuron, sourceLayer: Layer): void => {
// this.log(`propagateNeuron`, sourceLayer.neurons.length);
for (let i = 0; i < sourceLayer.neurons.length; i++) {
neuron.propagate(i, sourceLayer.neurons[i].output);
// neuron.propagate(0, sourceLayer.neurons[i].output);
}
};

public output = (): number[] => {
const resultsList: number[] = [];
for (let i = 0; i < this.neurons.length; i++) {
resultsList.push(this.neurons[i].output);
}
return resultsList;
};

public cost = (outputArray: number[]): number => {
let cost = 0;
for (let i = 0; i < this.neurons.length; i++) {
cost += this.neurons[i].cost(outputArray[i]);
}
const layerErrorCost = cost / (2 * this.neurons.length); // TODO: confirm the 2x factor; halving matches the common 1/2-MSE convention so the cost derivative carries no stray constant
// this.log(`Lec: ${fnz(layerErrorCost)}`);
return layerErrorCost;
};

/** Computes each neuron's propagation error: from the expected outputs for the output layer, otherwise from the next layer's accumulated weight errors */
public countErrors = (
nextLayerOutputArray: number[],
nextLayer?: Layer
): number[] => {
this.log(`CountErrors`);
if (this.layerId === 0) {
return [];
}

const errorWeights: number[] = [];
for (let i = 0; i < this.neurons.length; i++) {
if (nextLayer === undefined) {
this.neurons[i].propagationError = this.neurons[i].cost(
nextLayerOutputArray[i]
);
} else {
this.neurons[i].propagationError = nextLayer.getWeightError(i);
}

errorWeights[i] = this.neurons[i].propagationError;
}
this.log(`PropagationError`, errorWeights);
return errorWeights;
};

/**
* Sums the error contributions of all neurons for the given input (weight) index
*/
private getWeightError = (inputId: number): number => {
let error = 0;
for (let i = 0; i < this.neurons.length; i++) {
error += this.neurons[i].weightError(inputId);
}
return error;
};

public correctWeights = (learningDelta: number): void => {
for (let i = 0; i < this.neurons.length; i++) {
this.neurons[i].correctWeights(learningDelta);
}
};

private log = (logLine: string, ...args: unknown[]): void => {
if (!this.debug) {
return;
}

StringFunctions.log(`${this.name}: ${logLine}`, ...args);
};
}
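
A minimal usage sketch of this class (illustrative, not from the commit; it assumes the './neuron' barrel re-exports Layer and that the Neuron module initializes its own weights when initWeights is not called):

import { Layer } from './neuron';

// layer 0 only holds the input values; layer 1 computes weighted sums + activations
const inputLayer = new Layer(0, 2);
const hiddenLayer = new Layer(1, 2);

inputLayer.setOutput([1, 0]); // feed the input vector into the input layer
hiddenLayer.propagate(inputLayer); // forward pass: propagate + prediction per neuron
console.log(hiddenLayer.output()); // activations of the two hidden neurons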