Created March 30, 2024 17:20
conv3 maze test.
// Import the required libraries
const _ = require('lodash')                  // lodash for utility functions
const tf = require('@tensorflow/tfjs-node')  // TensorFlow.js with the native Node backend
const colors = require('colors')             // coloured terminal output
const algernon = require('algernon-js')      // maze generation and solving
// Generate a dataset of mazes and their A* solutions
async function generateMazeDataset(samples) {
  const [rows, cols] = [32, 32]  // Size of each maze
  let xs = []                    // Flattened mazes (model inputs)
  let ys = []                    // Mazes with the solution path marked (model targets)
  // Generate `samples` mazes and solve each one
  for (let i = 0; i < samples; i++) {
    // Generate the maze using Algernon's growing-tree algorithm
    const rawMaze = algernon.generateGrowingTreeGrid(rows, cols)
    // Solve the maze with A*, from the top-left corner [0, 0] towards cell [30, 30]
    const solution = algernon.solveAStarGrid(rawMaze, [0, 0], [30, 30])
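    // NOTE: the indexing below assumes `solution` is a list of [row, col]
    // cells along the solved path through the 32x32 grid.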
    // Flatten the 32x32 maze into a 1024-element array of 0s and 1s
    const serializedMaze = _.flatten(rawMaze.map(row => row.map(cell => (cell ? 1 : 0))))
    // Copy it and mark every cell on the solution path with a 2
    const serializedMaze2 = serializedMaze.slice()
    solution.forEach(s => {
      serializedMaze2[(s[0] * 32) + s[1]] = 2
    })
    // Add the maze (input) and the marked copy (target) to the dataset
    xs.push(serializedMaze)
    ys.push(serializedMaze2)
  }
  // Return the generated dataset
  return { xs, ys }
}
// Build and train the model
async function run() {
  // Generate training and test datasets
  let data = await generateMazeDataset(5000)
  let dataTest = await generateMazeDataset(100)
  // Split the datasets into inputs (xs) and targets (ys)
  let xsTrain = data.xs
  let ysTrain = data.ys
  let xsTest = dataTest.xs
  let ysTest = dataTest.ys
  // Define the network architecture
  let input = tf.layers.input({
    shape: [1024]  // One flattened 32x32 maze per example
  })
  let x = input
  // Reshape the flat input into a small 3D volume for the conv3d layers
  x = tf.layers.reshape({ targetShape: [8, 8, 4, 4] }).apply(x)
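  // 8 * 8 * 4 * 4 = 1024, so the reshape keeps every value: the first three
  // dimensions (8, 8, 4) act as the spatial volume and the last one (4) as
  // the channel dimension for the conv3d layers below.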
  // 3D convolutional layers
  x = tf.layers.conv3d({ filters: 64, kernelSize: 8, padding: 'same', activation: 'tanh' }).apply(x)
  x = tf.layers.conv3d({ filters: 64, kernelSize: 5, padding: 'same', activation: 'tanh' }).apply(x)
  x = tf.layers.conv3d({ filters: 256, kernelSize: 3, padding: 'same', activation: 'tanh' }).apply(x)
  // Regroup the conv features so there is one feature vector per maze cell
  x = tf.layers.reshape({ targetShape: [1024, 64] }).apply(x)
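  // The last conv output has shape [8, 8, 4, 256], i.e. 65,536 values per
  // example; viewing it as [1024, 64] gives each of the 1024 maze cells a
  // 64-dimensional feature vector for the per-cell classifier below.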
  // Per-cell classifier: a softmax over possible cell values
  let x1 = tf.layers.dense({
    units: 20,
    activation: 'softmax'
  }).apply(x)
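  // Only the label values 0, 1 and 2 ever occur in the targets, so 20 output
  // units is more than strictly needed; the extra classes are simply never
  // used, which is harmless with a sparse categorical loss.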
  // Create the model
  let model = tf.model({
    inputs: [input],
    outputs: [x1]
  })
  // Compile the model with optimizer, loss and metric
  model.compile({
    loss: 'sparseCategoricalCrossentropy',
    metrics: ['acc'],
    optimizer: tf.train.adam(0.001)
  })
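  // Sparse categorical crossentropy takes integer class labels (0/1/2 here)
  // rather than one-hot vectors, which is why the targets below keep their
  // raw cell values.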
  // Data generator: yields batches of training examples indefinitely
  function makeIterator() {
    let n = 0
    let idx = (new Array(ysTrain.length).fill(0).map((a, i) => i))
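    // idx is just the index list 0..N-1; n advances by one per call, so
    // successive 128-example batches are overlapping windows over the data.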
    const iterator = {
      next: () => tf.tidy(() => {
        let result
        let px = []
        let py = []
        n++
        // Assemble a batch of 128 examples
        for (let k = 0; k < 128; k++) {
          let u = idx[(n + k) % xsTrain.length]
          let xs = xsTrain[u]
          let ys = ysTrain[u]
          px.push(xs)
          py.push(ys)
        }
        // Convert the batch to tensors
        let tx1 = tf.tensor(px).cast('float32')
        let ty1 = tf.tensor(py).cast('float32').expandDims(-1)
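        // expandDims(-1) gives the labels shape [128, 1024, 1], matching the
        // model's [128, 1024, 20] softmax output for the sparse loss.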
        // Package the batch; `done` is always false, so the stream never ends
        result = {
          value: {
            xs: tx1,  // input batch tensor
            ys: ty1   // target batch tensor
          },
          done: false
        }
        return result
      })
    }
    return iterator
  }
  // Wrap the generator in a TensorFlow.js dataset
  const ds = tf.data.generator(makeIterator)
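  // The iterator never reports done, so the dataset is effectively infinite;
  // batchesPerEpoch in the fit call below is what bounds each epoch.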
  // Train the model on the dataset
  await model.fitDataset(ds, {
    // The generator above yields 128 examples per call, so this value only
    // controls how many batches are counted as one epoch
    batchesPerEpoch: Math.round(xsTrain.length / 32),
    epochs: 100,  // Number of epochs
    verbose: 1,   // Verbosity mode
    callbacks: {
      onEpochEnd() {
        // Render a flat 1024-element row as a 32x32 grid:
        // 2 -> blue (solution path), 1 -> red, anything else -> blank
        const render = row => _.chunk(row, 32)
          .map(line => line.map(c => c == 2 ? colors.bgBlue(' ') : c == 1 ? colors.bgRed(' ') : ' ').join(''))
          .join('\n')
        // Print a few test samples each epoch; tf.tidy disposes the
        // prediction tensors once their values are copied out with arraySync()
        tf.tidy(() => {
          let res = model.predict(tf.tensor(xsTest)).argMax(-1).arraySync()
          for (let x = 0; x < 5; x++) {
            let k = _.random(0, xsTest.length - 1)
            console.log('sample', x)
            console.log('input  :\n' + render(xsTest[k]))
            console.log('target :\n' + render(ysTest[k]))
            console.log('predict:\n' + render(res[k]))
          }
        })
      }
    }
  })
}
// Kick off the training process
run()
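To try it out (assuming the file is saved locally, e.g. as maze.js), install the four npm packages it requires (lodash, @tensorflow/tfjs-node, colors, algernon-js) and run the script with Node.js; at the end of every epoch it prints a few test mazes with the target and predicted solution paths rendered in colour.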