Skip to content

Commit

Permalink
feat: #7 now it is possible to use generators as source of samples
Browse files Browse the repository at this point in the history
fix: #8 network does not count epoch cost properly and completes work before training data is completed
not yet finished
  • Loading branch information
KEMBL committed Sep 7, 2020
1 parent 89db1b2 commit 447cd78
Show file tree
Hide file tree
Showing 9 changed files with 355 additions and 61 deletions.
3 changes: 2 additions & 1 deletion docs/neuron.notes.md
Original file line number Diff line number Diff line change
Expand Up @@ -40,4 +40,5 @@ Q&A
1) Define relation between cost function and activation function. How result of the cost function changes weight?
2) how to normalize w random selection? taking in account that P to select w [0,1] should be = P to select w (1,inf]? 0 -> 45 - > 90 degrees
any random value close to 1 would be ok?
3) why if input is greater then output it takes less steps to find a result?
3) why does it take fewer steps to find a result when the input is greater than the output?
4) What is the better way to present examples from the learning set: in random order or one by one?
107 changes: 93 additions & 14 deletions src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,32 +5,79 @@ import { Verbosity } from './models';

class Program {
constructor() {
const started = new Date();
const startedTime = new Date();
Log.log('Programm started');

Log.verbosity = Verbosity.Warning;
Configuration.bias = 1;
Configuration.activationType = 'ReLU'; // default
Configuration.useCostFunction = 'Default';
Configuration.useCostFunction = 'Squared';

// Regression
const networkInputs = [1, 0];
const targetOutputs = [1];
const inputsAmount = 1;
/**
 * Endless training-sample source: yields one input/target pair per step.
 * inputArray[0] carries the current angle converted to radians (all other
 * slots are zero-filled); outputArray carries the sine of that angle.
 * The angle walks 1, 2, ..., 359, 0, 1, ... degrees forever.
 */
function* generatorSinus(
  inputsAmount: number
): Generator<{ inputArray: number[]; outputArray: number[] }, void, unknown> {
  const fullCircle = 360; // wrap the angle after a full turn
  let degrees = 1;
  for (;;) {
    const radians = (degrees * Math.PI) / 180;
    const inputArray = new Array<number>(inputsAmount).fill(0);
    inputArray[0] = radians;
    yield { inputArray, outputArray: [Math.sin(radians)] };
    degrees = (degrees + 1) % fullCircle;
  }
}

// const gen = generatorSinus(2);
// console.log(gen.next());
// console.log(gen.next());
// console.log(gen.next());
// console.log(gen.next());
// console.log(gen.next());
// console.log(gen.next());
// console.log(gen.next());
// console.log(gen.next());
// console.log(gen.next());

// if(startedTime.getTime() !==1) return;

// 2,2,1- 41 sec
// 3,3,1- 60 sec
// 3,3,3,1 - 80 sec

//const networkInputs = [[1, 0]];
const targetOutputs = [[0.5]];
const maximumCostError = 0.0001;
const maxLearningSteps = 10000;
const maxEpochsCount = 10000;
const learningDelta = 0.1;
const layersConfig: LayerConfig[] = [
{ neurons: 2 },
{ neurons: 2 },
{ neurons: 3 },
{ neurons: 3 },
{ neurons: 3 },
// { neurons: 20 },
// { neurons: 3 },
{ neurons: 1, activationType: 'Sigmoid' }
//{ neurons: 1 }
];

// Fill in arrays if want to start not from random weights
// Neurons: XYZ X - source output, Y - layer row Z - input Layer
// Debug. prefill weights
// [ [layer1], [layer2], ..., [[neuron1], [neuron2], ... ], [[[weight1, weight2, ...]], [[weight1, weight2, ...]], ...], [neuron2], ... ] ]
const weights: number[][][] = [];
// const weights: number[] = [
// const weights: number[][][] = [
// [
// [0.13, -0.42], // w111, w211
// [-0.34, 0.38] // w121, w221
Expand All @@ -52,8 +99,8 @@ class Program {
// ];

const network = new Network(
layersConfig[0].neurons,
maxLearningSteps,
inputsAmount,
maxEpochsCount,
maximumCostError,
learningDelta
); // error, ldelta, maxSteps
Expand All @@ -66,18 +113,50 @@ class Program {
network.initWeights(weights);
}

network.learn(networkInputs, targetOutputs); // propagate / errorcost / weig\hts correction (back propagation)
//network.train(networkInputs, targetOutputs); // propagate / errorcost / weig\hts correction (back propagation)
network.train(generatorSinus, 360); // propagate / errorcost / weig\hts correction (back propagation)
//new Network().testNeuron();
const result = network.output();
Log.log('Programm finished', result, targetOutputs);
Log.log('Result weights', network.getWeights());
Log.log('Last step', Network.currentStep);
Log.log('Error cost', network.findStepError(targetOutputs));
Log.log('Error cost', network.networkError());
Log.log('Epochs', network.epochsCount());
Log.log(
`Finished in`,
new Date().getSeconds() - started.getSeconds(),
(new Date().getTime() - startedTime.getTime()) * 0.001,
'seconds'
);

if (startedTime.getTime() === -1) {
Log.log('');
Log.log('Prediction');
// for (let i = 0; i < networkInputs.length; i++) {
// const error = network.predict(networkInputs[i], targetOutputs[i]);
// Log.log(
// `Step ${i + 1}, Error cost`,
// error,
// network.output(),
// targetOutputs[i]
// );
// }

// const generator = generatorSinus(inputsAmount);
// for (let i = 0; i < 3; i++) {
// const sample = generator.next();
// if (sample.value) {
// const error = network.predict(
// sample.value.inputArray,
// sample.value.outputArray
// );
// Log.log(
// `Step ${i + 1}, Error cost`,
// error,
// network.output(),
// sample.value.outputArray
// );
// }
// }
}
}
}

Expand Down
31 changes: 27 additions & 4 deletions src/neuron/Layer.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,9 +10,12 @@ import { Neuron } from './';
* One neurons layer
*/
export class Layer {
public neurons: Neuron[] = [];

private moduleName = '';
private activationType: ActivationType = 'ReLU';
public neurons: Neuron[] = [];
/** Last found error cost */
private layerErrorCost = 0;

constructor(
public layerId: number,
Expand All @@ -25,6 +28,10 @@ export class Layer {
this.init();
}

/** Human-readable layer tag (e.g. "Lr 1") used in log/debug output. */
public toString = (): string => this.moduleName;

private init = (): void => {
this.neurons = [];
this.moduleName = `Lr ${this.layerId}`;
Expand Down Expand Up @@ -85,6 +92,13 @@ export class Layer {
// this.neurons.length
// );
for (let i = 0; i < this.neurons.length; i++) {
Log.debug(
`propagate`,
this.moduleName,
i,
this.neurons.length,
`${sourceLayer}`
);
this.propagateNeuron(this.neurons[i], sourceLayer);
this.neurons[i].prediction();
}
Expand All @@ -95,7 +109,12 @@ export class Layer {
* @param neuron
*/
private propagateNeuron = (neuron: Neuron, sourceLayer: Layer): void => {
// this.log(`propagateNeuron`, sourceLayer.neurons.length);
Log.debug(
`propagateNeuron`,
this.moduleName,
sourceLayer.neurons.length,
`${sourceLayer}`
);
for (let i = 0; i < sourceLayer.neurons.length; i++) {
neuron.propagate(i, sourceLayer.neurons[i].output);
// neuron.propagate(0, sourceLayer.neurons[i].output);
Expand All @@ -115,9 +134,13 @@ export class Layer {
for (let i = 0; i < this.neurons.length; i++) {
cost += this.neurons[i].cost(outputArray[i]);
}
const layerErrorCost = cost / (2 * this.neurons.length); // TODO: ? what is the purpose of division by 2*... ?
this.layerErrorCost = cost / (2 * this.neurons.length); // TODO: ? what is the purpose of division by 2*... ?
// this.log(`Lec: ${fnz(layerErrorCost)}`);
return layerErrorCost;
return this.layerErrorCost;
};

/** Error cost cached by the most recent cost() computation for this layer. */
public costError = (): number => this.layerErrorCost;

/** Receives values of errors on the next layer neurons */
Expand Down
Loading

0 comments on commit 447cd78

Please sign in to comment.