Last active
June 15, 2024 20:31
-
-
Save rtkclouds/73988204e21705f7c15f43b3e2da088e to your computer and use it in GitHub Desktop.
A small script to validate sequence-to-sequence learning with TensorFlow.js.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// Third-party dependencies: lodash utilities, the TensorFlow.js Node backend,
// and colors for ANSI-colored console output.
const _ = require('lodash');
const tf = require('@tensorflow/tfjs-node');
const colors = require('colors');
/**
 * Generate a dataset of `sp` samples. Each sample is a binary sequence of
 * length 1024 built from alternating runs: a run of 1s whose length counts
 * down from a random start in [0, 50], followed by a run of 0s of random
 * length in [10, 20]. The sequence is then split into 256 chunks of 4 values.
 *
 * Fixes vs. original: removed the dead `r = []` assignment (the buffer was
 * re-declared every iteration anyway) and inlined the two trivial lodash
 * helpers so the function is self-contained.
 *
 * @param {number} sp - Number of samples to generate.
 * @returns {number[][][]} `sp` samples, each an array of 256 length-4 chunks.
 */
function buildDataset(sp) {
  // Inclusive integer random in [min, max] (same contract as _.random).
  const randomInt = (min, max) => min + Math.floor(Math.random() * (max - min + 1));

  const samples = [];
  for (let i = 0; i < sp; i++) {
    // Every sequence starts with a single 0.
    const seq = [0];
    // Length of the next run of 1s; shrinks by one each iteration (floored at 0).
    let ones = randomInt(0, 50);
    while (seq.length < 1024) {
      ones = Math.max(ones - 1, 0);
      const zeros = randomInt(10, 20);
      for (let x = 0; x < ones; x++) seq.push(1);
      for (let x = 0; x < zeros; x++) seq.push(0);
    }
    // Truncate to exactly 1024 values and split into chunks of 4.
    const chunks = [];
    for (let p = 0; p < 1024; p += 4) {
      chunks.push(seq.slice(p, p + 4));
    }
    samples.push(chunks);
  }
  return samples;
}
// Build the training and testing datasets.
const data = buildDataset(2048);
const dataTest = buildDataset(4);

// Sliding-window split: every full 64-chunk window in a sample yields one
// (input, target) pair — the first 32 chunks are the input, the last 32
// the target the model must continue.
const xsTrain = [];
const ysTrain = [];
const xsTest = [];
const ysTest = [];

const addWindows = (source, xs, ys) => {
  for (const sample of source) {
    for (let k = 0; k < sample.length; k++) {
      const pool = sample.slice(k, k + 64);
      if (pool.length === 64) {
        xs.push(pool.slice(0, 32));
        ys.push(pool.slice(32));
      }
    }
  }
};
addWindows(data, xsTrain, ysTrain);
addWindows(dataTest, xsTest, ysTest);
// Input: 32 timesteps of 4 binary values each.
const input = tf.layers.input({ shape: [32, 4] });

// Reshape the 32x4 sequence into a 3-D volume and run it through a stack of
// 3-D convolutions, then flatten back to a per-timestep representation.
let x = tf.layers.reshape({ targetShape: [2, 4, 4, 4] }).apply(input);
for (let i = 0; i < 3; i++) {
  x = tf.layers.conv3d({ filters: 32, kernelSize: 5, padding: 'same', activation: 'tanh' }).apply(x);
}
x = tf.layers.conv3d({ filters: 32, kernelSize: 1, padding: 'same', activation: 'tanh' }).apply(x);
x = tf.layers.reshape({ targetShape: [32, 4, 8] }).apply(x);

// Two-way softmax per output position: predicts 0 or 1 for each value.
const x1 = tf.layers.dense({ units: 2, activation: 'softmax' }).apply(x);

const model = tf.model({
  inputs: [input],
  outputs: [x1],
});

// Sparse categorical cross-entropy over the two classes (labels are the raw
// 0/1 targets), optimized with Adam at a small learning rate.
// (The original comment claimed "mean squared error" — the loss is not MSE.)
model.compile({
  loss: 'sparseCategoricalCrossentropy',
  metrics: ['acc'],
  optimizer: tf.train.adam(0.0001),
});
/**
 * Create an infinite batch iterator over the shuffled training set for
 * tf.data.generator. Each next() call yields 128 (xs, ys) tensor pairs;
 * ys gets a trailing singleton axis as expected by
 * sparseCategoricalCrossentropy.
 *
 * Fix vs. original: the cursor advanced by 1 per next() while a batch
 * consumes 128 samples, so consecutive batches overlapped in 127 of 128
 * elements. The cursor now advances by a full batch.
 */
function makeIterator() {
  const batchSize = 128;
  // Shuffled index order so batches sample the training set randomly.
  const idx = _.shuffle(xsTrain.map((a, i) => i));
  let cursor = 0;
  return {
    next: () => tf.tidy(() => {
      const px = [];
      const py = [];
      for (let k = 0; k < batchSize; k++) {
        const u = idx[(cursor + k) % xsTrain.length];
        px.push(xsTrain[u]);
        py.push(ysTrain[u]);
      }
      cursor += batchSize;
      // tf.tidy keeps tensors that are returned inside the value object,
      // disposing only intermediates.
      return {
        value: {
          xs: tf.tensor(px),
          ys: tf.tensor(py).expandDims(-1),
        },
        done: false,
      };
    }),
  };
}
// Wrap the iterator factory in a TensorFlow.js dataset.
const ds = tf.data.generator(makeIterator);

// Train the model on the generated dataset.
// NOTE(review): batchesPerEpoch is sized by the *test* split; presumably it
// was meant to reflect the training set — confirm before relying on it.
model.fitDataset(ds, {
  batchesPerEpoch: xsTest.length,
  epochs: 100,
  verbose: 1,
  callbacks: {
    onEpochEnd() {
      // After every epoch, render a few random test samples: input and
      // target as blue/white bars, the model's prediction as red/white bars.
      const toBits = (rows) => rows.map((s) => s.map((c) => c.join('')).join(''));
      const paint = (bits, onColor) =>
        bits.split('').map((b) => (b === '1' ? onColor(' ') : colors.bgWhite(' '))).join('');

      // Predict the whole test set ONCE per epoch (the original re-ran the
      // full forward pass inside the 5-sample print loop) and let tf.tidy
      // dispose the input/prediction tensors afterwards.
      const res = tf.tidy(() =>
        model.predict(tf.tensor(xsTest)).argMax(-1).arraySync()
      );
      const inputs = toBits(xsTest);
      const targets = toBits(ysTest);
      const preds = toBits(res);
      for (let x = 0; x < 5; x++) {
        const k = _.random(0, xsTest.length - 1);
        console.log('sample', x);
        console.log('input :', paint(inputs[k], colors.bgBlue));
        console.log('target :', paint(targets[k], colors.bgBlue));
        console.log('predict:', paint(preds[k], colors.bgRed));
      }
    },
  },
}).catch((err) => {
  // fitDataset returns a promise; don't let a training failure vanish.
  console.error('training failed:', err);
});
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment