Add custom loss functions and a R/W state matrix #936

Open
wants to merge 32 commits into master
Changes from 1 commit
32 commits
ee5fc8d
Implement a loss function for GPU
ashenmagic Jun 15, 2024
6baf8c6
Add partial support for loss functions
ashenmagic Jun 16, 2024
eb909c9
Update loss.ts
ashenmagic Jun 16, 2024
1f2c681
OMG OMG OMG!!!!!! ZOOOOOMIESSS <3333
ashenmagic Jun 16, 2024
ba12f82
Fixed the bug~! <3
ashenmagic Jun 16, 2024
7371a23
Generalize loss function for both CPU and GPU
ashenmagic Jun 16, 2024
2a7840a
Add memory function
ashenmagic Jun 16, 2024
a762a48
Backup: Another thunderstorm, power outage risk
ashenmagic Jun 16, 2024
94fc99a
Revert "Backup: Another thunderstorm, power outage risk"
ashenmagic Jun 16, 2024
ba03eb3
Add parameter `lossDelta`
ashenmagic Jun 16, 2024
aa337f3
Rename memory to RAM
ashenmagic Jun 16, 2024
c655c52
Add `updateRAM`
ashenmagic Jun 17, 2024
b45d581
Fix bug that required `ramSize` to be defined
ashenmagic Jun 17, 2024
b703e4a
Prune unused code
ashenmagic Jun 17, 2024
fda0349
Run `updateRAM` on both CPU and GPU nets
ashenmagic Jun 17, 2024
3d392f1
Design custom loss function for autoencoders
ashenmagic Jun 17, 2024
ce98bf1
Fix CI task errors
ashenmagic Jun 17, 2024
51b9aa9
Fix a CI task related to type coersion
ashenmagic Jun 17, 2024
c5c8438
TypeScript hates me today
ashenmagic Jun 17, 2024
e8384a5
Fix all lint errors
ashenmagic Jun 17, 2024
a21c387
Remove unused `@ts-expect-error` directive
ashenmagic Jun 17, 2024
83574f6
Please, linter gods, pleaaaase stop hating me
ashenmagic Jun 17, 2024
52edc88
Properly initialize `NeuralNetwork.ram`
ashenmagic Jun 18, 2024
8f8f455
Finish updating autoencoder to use loss function
ashenmagic Jun 18, 2024
00b8515
Add a CPU variant of autoencoder
ashenmagic Jun 18, 2024
e4e6906
Polish autoencoders and remove debug code
ashenmagic Jun 18, 2024
4d7b5ef
Remove debug code
ashenmagic Jun 18, 2024
cd0ad75
Export the CPU autoencoder implementation
ashenmagic Jun 18, 2024
693bd0b
Update tests and documentation
ashenmagic Jun 19, 2024
a03161f
Merge branch 'main' into feature/loss-function
ashenmagic Nov 6, 2024
561dda3
Remove duplicate lines added during merge
ashenmagic Dec 29, 2024
42aece2
Resolve nested tests
ashenmagic Jan 3, 2025
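
Taken together, the commits above add two related features: a user-definable loss function that runs on both the CPU and GPU backends, and a read/write state matrix (initially called `memory`, later renamed to `RAM`, with a `ramSize` option, an `updateRAM` hook, and a `lossDelta` parameter) that a custom loss can consult during training. Only the `loss` option is visible in the diffs below (`options.loss ??= loss;`), so the following is a hedged sketch: the callback's parameter list is modeled on the autoencoder kernel and is an assumption, not a confirmed API.

import { IKernelFunctionThis } from 'gpu.js';
import { NeuralNetworkIO, NeuralNetworkRAM } from './neural-network';
import { NeuralNetworkGPU } from './neural-network-gpu';

// Hypothetical custom loss: plain squared error per output node.
// Only `actual` and `inputs` appear in the excerpted kernel below; the
// remaining parameters (and their order) are assumptions.
function squaredLoss(
  this: IKernelFunctionThis,
  actual: number,
  expected: number,
  inputs: NeuralNetworkIO,
  ram: NeuralNetworkRAM
): number {
  const diff = actual - expected;
  return diff * diff;
}

const net = new NeuralNetworkGPU<number[], number[]>({
  inputSize: 3,
  hiddenLayers: [4],
  outputSize: 3,
  loss: squaredLoss, // option name taken from `options.loss ??= loss;` in src/autoencoder.ts
});
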
Fix all lint errors
ashenmagic committed Jun 17, 2024
commit e8384a5878e6083c05848047c778fcb3e7f34402
105 changes: 47 additions & 58 deletions src/autoencoder.test.ts
@@ -1,79 +1,68 @@
import AutoencoderGPU from "./autoencoder";
import AutoencoderGPU from './autoencoder';

const trainingData = [
[0, 0, 0],
[0, 1, 1],
[1, 0, 1],
[1, 1, 0]
[1, 1, 0],
];

const xornet = new AutoencoderGPU<number[], number[]>(
{
inputSize: 3,
hiddenLayers: [ 5, 2, 5 ]
}
);
const xornet = new AutoencoderGPU<number[], number[]>({
inputSize: 3,
hiddenLayers: [5, 2, 5],
});

const errorThresh = 0.011;

const result = xornet.train(
trainingData, {
iterations: 100000,
errorThresh
}
);

test(
"denoise a data sample",
async () => {
expect(result.error).toBeLessThanOrEqual(errorThresh);

function xor(...args: number[]) {
return Math.round(xornet.denoise(args)[2]);
}
const result = xornet.train(trainingData, {
iterations: 100000,
errorThresh,
});

const run1 = xor(0, 0, 0);
const run2 = xor(0, 1, 1);
const run3 = xor(1, 0, 1);
const run4 = xor(1, 1, 0);
test('denoise a data sample', async () => {
expect(result.error).toBeLessThanOrEqual(errorThresh);

expect(run1).toBe(0);
expect(run2).toBe(1);
expect(run3).toBe(1);
expect(run4).toBe(0);
function xor(...args: number[]) {
return Math.round(xornet.denoise(args)[2]);
}
);

test(
"encode and decode a data sample",
async () => {
expect(result.error).toBeLessThanOrEqual(errorThresh);
const run1 = xor(0, 0, 0);
const run2 = xor(0, 1, 1);
const run3 = xor(1, 0, 1);
const run4 = xor(1, 1, 0);

const run1$input = [0, 0, 0];
const run1$encoded = xornet.encode(run1$input);
const run1$decoded = xornet.decode(run1$encoded);
expect(run1).toBe(0);
expect(run2).toBe(1);
expect(run3).toBe(1);
expect(run4).toBe(0);
});

const run2$input = [0, 1, 1];
const run2$encoded = xornet.encode(run2$input);
const run2$decoded = xornet.decode(run2$encoded);
test('encode and decode a data sample', async () => {
expect(result.error).toBeLessThanOrEqual(errorThresh);

for (let i = 0; i < 3; i++) expect(Math.round(run1$decoded[i])).toBe(run1$input[i]);
for (let i = 0; i < 3; i++) expect(Math.round(run2$decoded[i])).toBe(run2$input[i]);
}
);
const run1$input = [0, 0, 0];
const run1$encoded = xornet.encode(run1$input);
const run1$decoded = xornet.decode(run1$encoded);

const run2$input = [0, 1, 1];
const run2$encoded = xornet.encode(run2$input);
const run2$decoded = xornet.decode(run2$encoded);

test(
"test a data sample for anomalies",
async () => {
expect(result.error).toBeLessThanOrEqual(errorThresh);
for (let i = 0; i < 3; i++)
expect(Math.round(run1$decoded[i])).toBe(run1$input[i]);
for (let i = 0; i < 3; i++)
expect(Math.round(run2$decoded[i])).toBe(run2$input[i]);
});

function includesAnomalies(...args: number[]) {
expect(xornet.likelyIncludesAnomalies(args)).toBe(false);
}
test('test a data sample for anomalies', async () => {
expect(result.error).toBeLessThanOrEqual(errorThresh);

includesAnomalies(0, 0, 0);
includesAnomalies(0, 1, 1);
includesAnomalies(1, 0, 1);
includesAnomalies(1, 1, 0);
function includesAnomalies(...args: number[]) {
expect(xornet.likelyIncludesAnomalies(args)).toBe(false);
}
);

includesAnomalies(0, 0, 0);
includesAnomalies(0, 1, 1);
includesAnomalies(1, 0, 1);
includesAnomalies(1, 1, 0);
});
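
The reformatted tests train the autoencoder on the XOR truth table and only assert that clean rows pass `likelyIncludesAnomalies`. As a hedged companion under the same assumptions (the corrupted sample `[0, 1, 0]` is illustrative and not part of the test file), a row that breaks the XOR relation would usually trip the anomaly check, subject to how well the net converged:

// Rows from the training set should not be flagged as anomalous.
xornet.likelyIncludesAnomalies([0, 1, 1]); // false
// A row that violates XOR (the third value should be 1) will usually be
// flagged, using the default threshold of 0.2 seen in `anomalyThreshold = 0.2`.
xornet.likelyIncludesAnomalies([0, 1, 0]); // likely true, not guaranteed
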
70 changes: 50 additions & 20 deletions src/autoencoder.ts
@@ -1,8 +1,23 @@
import { IKernelFunctionThis, KernelOutput, Texture, TextureArrayOutput } from "gpu.js";
import { IJSONLayer, INeuralNetworkData, INeuralNetworkDatum, INeuralNetworkTrainOptions, NeuralNetworkIO, NeuralNetworkRAM } from "./neural-network";
import { INeuralNetworkGPUOptions, NeuralNetworkGPU } from "./neural-network-gpu";
import { INeuralNetworkState } from "./neural-network-types";
import { UntrainedNeuralNetworkError } from "./errors/untrained-neural-network-error";
import {
IKernelFunctionThis,
KernelOutput,
Texture,
TextureArrayOutput,
} from 'gpu.js';
import {
IJSONLayer,
INeuralNetworkData,
INeuralNetworkDatum,
INeuralNetworkTrainOptions,
NeuralNetworkIO,
NeuralNetworkRAM,
} from './neural-network';
import {
INeuralNetworkGPUOptions,
NeuralNetworkGPU,
} from './neural-network-gpu';
import { INeuralNetworkState } from './neural-network-types';
import { UntrainedNeuralNetworkError } from './errors/untrained-neural-network-error';

function loss(
this: IKernelFunctionThis,
@@ -16,7 +31,7 @@ function loss(
// if ( o ≈ i0 ) then return 10% of the loss value.
// Otherwise, return 1000% of the full loss value.
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
// @ts-expect-error
if (Math.round(actual) !== Math.round(inputs[this.thread.x])) error *= 32;
else error *= 0.03125;

@@ -26,12 +41,13 @@
/**
* An autoencoder learns to compress input data down to relevant features and reconstruct input data from its compressed representation.
*/
export class AutoencoderGPU<DecodedData extends INeuralNetworkData, EncodedData extends INeuralNetworkData> extends NeuralNetworkGPU<DecodedData, DecodedData> {
export class AutoencoderGPU<
DecodedData extends INeuralNetworkData,
EncodedData extends INeuralNetworkData
> extends NeuralNetworkGPU<DecodedData, DecodedData> {
private decoder?: NeuralNetworkGPU<EncodedData, DecodedData>;

constructor (
options?: Partial<INeuralNetworkGPUOptions>
) {
constructor(options?: Partial<INeuralNetworkGPUOptions>) {
// Create default options for the autoencoder.
options ??= {};

@@ -40,7 +56,7 @@ export class AutoencoderGPU<DecodedData extends INeuralNetworkData, EncodedData
// Define the denoiser subnet's input and output sizes.
options.inputSize = options.outputSize = decodedSize;

options.hiddenLayers ??= [ Math.round(decodedSize * 0.66) ];
options.hiddenLayers ??= [Math.round(decodedSize * 0.66)];

options.loss ??= loss;

@@ -92,7 +108,8 @@ export class AutoencoderGPU<DecodedData extends INeuralNetworkData, EncodedData
this.run(input);

// Get the auto-encoded input.
let encodedInput: TextureArrayOutput = this.encodedLayer as TextureArrayOutput;
let encodedInput: TextureArrayOutput = this
.encodedLayer as TextureArrayOutput;

// If the encoded input is a `Texture`, convert it into an `Array`.
if (encodedInput instanceof Texture) encodedInput = encodedInput.toArray();
@@ -110,7 +127,7 @@ export class AutoencoderGPU<DecodedData extends INeuralNetworkData, EncodedData
* @param {DecodedData} input
* @returns {boolean}
*/
likelyIncludesAnomalies(input: DecodedData, anomalyThreshold: number = 0.2): boolean {
likelyIncludesAnomalies(input: DecodedData, anomalyThreshold = 0.2): boolean {
// Create the anomaly vector.
const anomalies: number[] = [];

@@ -119,7 +136,9 @@ export class AutoencoderGPU<DecodedData extends INeuralNetworkData, EncodedData

// Calculate the anomaly vector.
for (let i = 0; i < (input.length ?? 0); i++) {
anomalies[i] = Math.abs((input as number[])[i] - (denoised as number[])[i]);
anomalies[i] = Math.abs(
(input as number[])[i] - (denoised as number[])[i]
);
}

// Calculate the sum of all anomalies within the vector.
@@ -141,13 +160,24 @@ export class AutoencoderGPU<DecodedData extends INeuralNetworkData, EncodedData
* @param {Partial<INeuralNetworkTrainOptions>} options
* @returns {INeuralNetworkState}
*/
train(data: Partial<DecodedData>[] | INeuralNetworkDatum<Partial<DecodedData>, Partial<DecodedData>>[], options?: Partial<INeuralNetworkTrainOptions>): INeuralNetworkState {
const preprocessedData: INeuralNetworkDatum<Partial<DecodedData>, Partial<DecodedData>>[] = [];
train(
data:
| Array<Partial<DecodedData>>
| Array<INeuralNetworkDatum<Partial<DecodedData>, Partial<DecodedData>>>,
options?: Partial<INeuralNetworkTrainOptions>
): INeuralNetworkState {
const preprocessedData: Array<INeuralNetworkDatum<
Partial<DecodedData>,
Partial<DecodedData>
>> = [];

if (data.length && data.length > 0)
for (let datum of data) {
preprocessedData.push( { input: datum as Partial<DecodedData>, output: datum as Partial<DecodedData> } );
}
for (const datum of data) {
preprocessedData.push({
input: datum as Partial<DecodedData>,
output: datum as Partial<DecodedData>,
});
}

const results = super.train(preprocessedData, options);

@@ -179,7 +209,7 @@ export class AutoencoderGPU<DecodedData extends INeuralNetworkData, EncodedData

const decoder = new NeuralNetworkGPU().fromJSON(json);

return decoder as unknown as NeuralNetworkGPU<EncodedData, DecodedData>;
return (decoder as unknown) as NeuralNetworkGPU<EncodedData, DecodedData>;
}

/**
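
The `loss` kernel added at the top of src/autoencoder.ts weights the base loss by whether the rounded reconstruction matches the rounded input: a mismatch multiplies the error by 32, a match scales it down to 0.03125 (1/32), so mismatched outputs dominate training. (The in-code comment mentions 10% and 1000%, but the multipliers actually applied are 0.03125 and 32.) A minimal CPU-side restatement of that weighting, with illustrative names that are not part of the diff:

// `baseError` stands in for the underlying per-output loss value.
function weightedAutoencoderLoss(
  actual: number,
  input: number,
  baseError: number
): number {
  return Math.round(actual) !== Math.round(input)
    ? baseError * 32 // reconstruction flipped the bit: amplify the loss
    : baseError * 0.03125; // reconstruction kept the bit: damp the loss
}
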
14 changes: 8 additions & 6 deletions src/errors/untrained-neural-network-error.ts
@@ -1,13 +1,15 @@
import { NeuralNetwork } from "../neural-network";
interface IErrorableNeuralNetworkConstructor {
name: string;
}

interface IErrorableNeuralNetwork {
constructor: Function;
constructor: IErrorableNeuralNetworkConstructor;
}

export class UntrainedNeuralNetworkError extends Error {
constructor (
neuralNetwork: IErrorableNeuralNetwork
) {
super(`Cannot run a ${neuralNetwork.constructor.name} before it is trained.`);
constructor(neuralNetwork: IErrorableNeuralNetwork) {
super(
`Cannot run a ${neuralNetwork.constructor.name} before it is trained.`
);
}
}
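
The refactor above removes the hard dependency on `NeuralNetwork`: the error now accepts any object whose `constructor` exposes a `name`, so every network class in the package can throw it. A minimal sketch of how a caller might raise it (the `ExampleNet` class and its `trained` flag are illustrative, not part of this PR):

import { UntrainedNeuralNetworkError } from './errors/untrained-neural-network-error';

class ExampleNet {
  private trained = false;

  run(input: number[]): number[] {
    if (!this.trained) {
      // Produces: "Cannot run a ExampleNet before it is trained."
      throw new UntrainedNeuralNetworkError(this);
    }
    return input;
  }
}
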