From b2d85b992cb76208629456cd7f6335831ed9c9f7 Mon Sep 17 00:00:00 2001 From: Linda Paiste Date: Thu, 14 Mar 2024 15:19:59 -0500 Subject: [PATCH 1/3] Cleanup neural network callbacks --- src/NeuralNetwork/NeuralNetwork.js | 46 +++------ src/NeuralNetwork/NeuralNetworkData.js | 37 +++---- src/NeuralNetwork/index.js | 129 +++++++++++++------------ src/utils/callcallback.js | 4 +- 4 files changed, 90 insertions(+), 126 deletions(-) diff --git a/src/NeuralNetwork/NeuralNetwork.js b/src/NeuralNetwork/NeuralNetwork.js index faabf94f..c6bcfa3b 100644 --- a/src/NeuralNetwork/NeuralNetwork.js +++ b/src/NeuralNetwork/NeuralNetwork.js @@ -1,7 +1,5 @@ import * as tf from "@tensorflow/tfjs"; import axios from "axios"; -import callCallback from "../utils/callcallback"; -import handleArguments from "../utils/handleArguments"; import { saveBlob } from "../utils/io"; import { randomGaussian } from "../utils/random"; @@ -11,7 +9,9 @@ class NeuralNetwork { this.isTrained = false; this.isCompiled = false; this.isLayered = false; - // the model + /** + * @type {tf.Sequential | null} - the TensorFlow model + */ this.model = null; // methods @@ -21,7 +21,6 @@ class NeuralNetwork { this.compile = this.compile.bind(this); this.setOptimizerFunction = this.setOptimizerFunction.bind(this); this.train = this.train.bind(this); - this.trainInternal = this.trainInternal.bind(this); this.predict = this.predict.bind(this); this.classify = this.classify.bind(this); this.save = this.save.bind(this); @@ -89,20 +88,11 @@ class NeuralNetwork { return optimizer.call(this, learningRate); } - /** - * Calls the trainInternal() and calls the callback when finished - * @param {*} _options - * @param {*} _cb - */ - train(_options, _cb) { - return callCallback(this.trainInternal(_options), _cb); - } - /** * Train the model - * @param {*} _options + * @param {Object} _options */ - async trainInternal(_options) { + async train(_options) { const TRAINING_OPTIONS = _options; const xs = TRAINING_OPTIONS.inputs; @@ -178,15 +168,12 @@ class NeuralNetwork { // are the same as .predict() /** - * save the model - * @param {*} nameOrCb - * @param {*} cb + * save the model.json and the weights.bin files + * @param {string} modelName + * @return {Promise} */ - async save(nameOrCb, cb) { - const { string, callback } = handleArguments(nameOrCb, cb); - const modelName = string || "model"; - - this.model.save( + async save(modelName = "model") { + await this.model.save( tf.io.withSaveHandler(async (data) => { this.weightsManifest = { modelTopology: data.modelTopology, @@ -208,19 +195,15 @@ class NeuralNetwork { `${modelName}.json`, "text/plain" ); - if (callback) { - callback(); - } }) ); } /** * loads the model and weights - * @param {*} filesOrPath - * @param {*} callback + * @param {string | FileList | Object} filesOrPath */ - async load(filesOrPath = null, callback) { + async load(filesOrPath) { if (filesOrPath instanceof FileList) { const files = await Promise.all( Array.from(filesOrPath).map(async (file) => { @@ -277,11 +260,6 @@ class NeuralNetwork { this.isCompiled = true; this.isLayered = true; this.isTrained = true; - - if (callback) { - callback(); - } - return this.model; } /** diff --git a/src/NeuralNetwork/NeuralNetworkData.js b/src/NeuralNetwork/NeuralNetworkData.js index ad851afc..76339553 100644 --- a/src/NeuralNetwork/NeuralNetworkData.js +++ b/src/NeuralNetwork/NeuralNetworkData.js @@ -1,6 +1,5 @@ import * as tf from "@tensorflow/tfjs"; import axios from "axios"; -import handleArguments from "../utils/handleArguments"; import { 
saveBlob } from "../utils/io"; import nnUtils from "./NeuralNetworkUtils"; @@ -701,10 +700,10 @@ class NeuralNetworkData { /** * loadData from fileinput or path - * @param {*} filesOrPath - * @param {*} callback + * @param {string | FileList | Object} filesOrPath + * @return {Promise} */ - async loadData(filesOrPath = null, callback) { + async loadData(filesOrPath) { try { let loadedData; @@ -740,10 +739,6 @@ class NeuralNetworkData { 'data must be a json object containing an array called "data" ' ); } - - if (callback) { - callback(); - } } catch (error) { throw new Error(error); } @@ -751,7 +746,8 @@ class NeuralNetworkData { /** * saveData - * @param {*} name + * @param {string} [name] + * @return {Promise} */ async saveData(name) { const today = new Date(); @@ -775,29 +771,23 @@ class NeuralNetworkData { /** * Saves metadata of the data - * @param {*} nameOrCb - * @param {*} cb + * @param {string} modelName + * @return {Promise} */ - async saveMeta(nameOrCb, cb) { - const { string, callback } = handleArguments(nameOrCb, cb); - const modelName = string || "model"; - + async saveMeta(modelName = "model") { await saveBlob( JSON.stringify(this.meta), `${modelName}_meta.json`, "text/plain" ); - if (callback) { - callback(); - } } /** * load a model and metadata - * @param {*} filesOrPath - * @param {*} callback + * @param {string | FileList | Object} filesOrPath + * @return {Promise} */ - async loadMeta(filesOrPath = null, callback) { + async loadMeta(filesOrPath) { if (filesOrPath instanceof FileList) { const files = await Promise.all( Array.from(filesOrPath).map(async (file) => { @@ -852,11 +842,6 @@ class NeuralNetworkData { this.isMetadataReady = true; this.isWarmedUp = true; - - if (callback) { - callback(); - } - return this.meta; } /* diff --git a/src/NeuralNetwork/index.js b/src/NeuralNetwork/index.js index 00dfd420..1dc9037b 100644 --- a/src/NeuralNetwork/index.js +++ b/src/NeuralNetwork/index.js @@ -24,8 +24,7 @@ const DEFAULTS = { neuroEvolution: false, }; class DiyNeuralNetwork { - constructor(options, cb) { - this.callback = cb; + constructor(options, callback) { // Is there a better way to handle a different // default learning rate for image classification tasks? @@ -47,14 +46,11 @@ class DiyNeuralNetwork { training: [], }; - this.ready = false; - // Methods this.init = this.init.bind(this); // adding data this.addData = this.addData.bind(this); this.loadDataFromUrl = this.loadDataFromUrl.bind(this); - this.loadDataInternal = this.loadDataInternal.bind(this); // metadata prep this.createMetaData = this.createMetaData.bind(this); // data prep and handling @@ -99,7 +95,7 @@ class DiyNeuralNetwork { this.crossover = this.crossover.bind(this); // Initialize - this.init(this.callback); + this.ready = callCallback(this.init(), callback); } /** @@ -109,23 +105,23 @@ class DiyNeuralNetwork { */ /** - * init - * @param {*} callback + * @private + * init - handles the options provided to the constructor for creating layers, loading a model, and loading data. 
+ * @return {Promise} - will be awaited by this.ready */ - init(callback) { + async init() { // check if the a static model should be built based on the inputs and output properties if (this.options.neuroEvolution === true) { this.createLayersNoTraining(); } - if (this.options.dataUrl !== null) { - this.ready = this.loadDataFromUrl(this.options, callback); - } else if (this.options.modelUrl !== null) { + if (this.options.dataUrl) { + await this.loadDataFromUrl(); + } else if (this.options.modelUrl) { // will take a URL to model.json, an object, or files array - this.ready = this.load(this.options.modelUrl, callback); - } else { - this.ready = true; + await this.load(this.options.modelUrl); } + return this; } /** @@ -224,20 +220,12 @@ class DiyNeuralNetwork { } /** - * loadData - * @param {*} options - * @param {*} callback - */ - loadDataFromUrl(options, callback) { - return callCallback(this.loadDataInternal(options), callback); - } - - /** - * loadDataInternal - * @param {*} options + * @private + * called by init() when there is a `dataUrl` in the constructor options. + * TODO: why does this have different logic from loadData? (Passes input/output labels, creates metadata, prepares for training) - Linda */ - async loadDataInternal(options) { - const { dataUrl, inputs, outputs } = options; + async loadDataFromUrl() { + const { dataUrl, inputs, outputs } = this.options; const data = await this.neuralNetworkData.loadDataFromUrl( dataUrl, @@ -248,7 +236,7 @@ class DiyNeuralNetwork { // once the data are loaded, create the metadata // and prep the data for training // if the inputs are defined as an array of [img_width, img_height, channels] - this.createMetadata(data); + this.createMetaData(data); this.prepareForTraining(data); } @@ -494,8 +482,9 @@ class DiyNeuralNetwork { * @param {*} optionsOrCallback * @param {*} optionsOrWhileTraining * @param {*} callback + * @return {Promise} */ - train(optionsOrCallback, optionsOrWhileTraining, callback) { + async train(optionsOrCallback, optionsOrWhileTraining, callback) { let options; let whileTrainingCb; let finishedTrainingCb; @@ -527,15 +516,16 @@ class DiyNeuralNetwork { finishedTrainingCb = optionsOrCallback; } - this.trainInternal(options, whileTrainingCb, finishedTrainingCb); + return callCallback(this.trainInternal(options, whileTrainingCb), finishedTrainingCb); } /** * train - * @param {*} _options - * @param {*} _cb + * @param {Object} _options + * @param {function} [whileTrainingCb] + * @return {Promise} */ - trainInternal(_options, whileTrainingCb, finishedTrainingCb) { + async trainInternal(_options, whileTrainingCb) { const options = { epochs: 10, batchSize: 32, @@ -613,7 +603,7 @@ class DiyNeuralNetwork { } // train once the model is compiled - this.neuralNetwork.train(options, finishedTrainingCb); + await this.neuralNetwork.train(options); } /** @@ -1150,20 +1140,29 @@ class DiyNeuralNetwork { */ /** - * save data - * @param {*} name + * @public + * saves the training data to a JSON file. + * @param {string} [name] Optional - The name for the saved file. + * Should not include the file extension. + * Defaults to the current date and time. + * @param {ML5Callback} [callback] Optional - A function to call when the save is complete. 
+ * @return {Promise} */ - saveData(name) { - this.neuralNetworkData.saveData(name); + saveData(name, callback) { + const args = handleArguments(name, callback); + return callCallback(this.neuralNetworkData.saveData(args.name), args.callback); } /** + * @public * load data - * @param {*} filesOrPath - * @param {*} callback + * @param {string | FileList | Object} filesOrPath - The URL of the file to load, + * or a FileList object (.files) from an HTML element . + * @param {ML5Callback} [callback] Optional - A function to call when the loading is complete. + * @return {Promise} */ - async loadData(filesOrPath = null, callback) { - this.neuralNetworkData.loadData(filesOrPath, callback); + async loadData(filesOrPath, callback) { + return callCallback(this.neuralNetworkData.loadData(filesOrPath), callback); } /** @@ -1173,36 +1172,38 @@ class DiyNeuralNetwork { */ /** + * @public * saves the model, weights, and metadata - * @param {*} nameOrCb - * @param {*} cb + * @param {string} [name] Optional - The name for the saved file. + * Should not include the file extension. + * Defaults to 'model'. + * @param {ML5Callback} [callback] Optional - A function to call when the save is complete. + * @return {Promise} */ - save(nameOrCb, cb) { - const { string, callback } = handleArguments(nameOrCb, cb); - const modelName = string || "model"; + async save(name, callback) { + const args = handleArguments(name, callback); + const modelName = args.string || 'model'; // save the model - this.neuralNetwork.save(modelName, () => { - this.neuralNetworkData.saveMeta(modelName, callback); - }); + return callCallback(Promise.all([ + this.neuralNetwork.save(modelName), + this.neuralNetworkData.saveMeta(modelName) + ]), args.callback); } /** + * @public - also called internally by init() when there is a modelUrl in the options * load a model and metadata - * @param {*} filesOrPath - * @param {*} callback + * @param {string | FileList | Object} filesOrPath - The URL of the file to load, + * or a FileList object (.files) from an HTML element . + * @param {ML5Callback} [callback] Optional - A function to call when the loading is complete. + * @return {Promise} */ - async load(filesOrPath = null, cb) { - let callback; - if (cb) { - callback = cb; - } - - this.neuralNetwork.load(filesOrPath, () => { - this.neuralNetworkData.loadMeta(filesOrPath, callback); - - return this.neuralNetwork.model; - }); + async load(filesOrPath, callback) { + return callCallback(Promise.all([ + this.neuralNetwork.load(filesOrPath), + this.neuralNetworkData.loadMeta(filesOrPath) + ]), callback); } /** diff --git a/src/utils/callcallback.js b/src/utils/callcallback.js index bf3cc6e2..0352c04a 100644 --- a/src/utils/callcallback.js +++ b/src/utils/callcallback.js @@ -10,8 +10,8 @@ * Generic type T describes the type of the result. * @template T * @callback ML5Callback - * @param {unknown} error - any error thrown during the execution of the function. - * @param {T} [result] - the expected result, if successful. + * @param {T | undefined} result - the expected result, if successful. + * @param {unknown} [error] - any error thrown during the execution of the function. * @return {void} - callbacks can have side effects, but should not return a value. */ From d2212dfd125d09caf198da66a8f6d3db1382c589 Mon Sep 17 00:00:00 2001 From: Linda Paiste Date: Fri, 15 Mar 2024 19:25:40 -0500 Subject: [PATCH 2/3] Cleanup unnecessary passing of arguments between neural network functions. 
--- src/NeuralNetwork/NeuralNetwork.js | 8 +- src/NeuralNetwork/NeuralNetworkData.js | 251 +++++++++---------------- src/NeuralNetwork/index.js | 161 ++++++++-------- 3 files changed, 167 insertions(+), 253 deletions(-) diff --git a/src/NeuralNetwork/NeuralNetwork.js b/src/NeuralNetwork/NeuralNetwork.js index faabf94f..1b719080 100644 --- a/src/NeuralNetwork/NeuralNetwork.js +++ b/src/NeuralNetwork/NeuralNetwork.js @@ -57,11 +57,11 @@ class NeuralNetwork { /** * add layer to the model * if the model has 2 or more layers switch the isLayered flag - * @param {*} _layerOptions + * @param {tf.Layer} layer + * @void */ - addLayer(_layerOptions) { - const LAYER_OPTIONS = _layerOptions || {}; - this.model.add(LAYER_OPTIONS); + addLayer(layer) { + this.model.add(layer); // check if it has at least an input and output layer if (this.model.layers.length >= 2) { diff --git a/src/NeuralNetwork/NeuralNetworkData.js b/src/NeuralNetwork/NeuralNetworkData.js index ad851afc..7efbd004 100644 --- a/src/NeuralNetwork/NeuralNetworkData.js +++ b/src/NeuralNetwork/NeuralNetworkData.js @@ -21,43 +21,6 @@ class NeuralNetworkData { this.data = { raw: [], // array of {xs:{}, ys:{}} }; - - // methods - // summarize data - this.createMetadata = this.createMetadata.bind(this); - this.getDataStats = this.getDataStats.bind(this); - this.getInputMetaStats = this.getInputMetaStats.bind(this); - this.getDataUnits = this.getDataUnits.bind(this); - this.getInputMetaUnits = this.getInputMetaUnits.bind(this); - this.getDTypesFromData = this.getDTypesFromData.bind(this); - // add data - this.addData = this.addData.bind(this); - // data conversion - this.convertRawToTensors = this.convertRawToTensors.bind(this); - // data normalization / unnormalization - this.normalizeDataRaw = this.normalizeDataRaw.bind(this); - this.normalizeInputData = this.normalizeInputData.bind(this); - this.normalizeArray = this.normalizeArray.bind(this); - this.unnormalizeArray = this.unnormalizeArray.bind(this); - // one hot - this.applyOneHotEncodingsToDataRaw = - this.applyOneHotEncodingsToDataRaw.bind(this); - this.getDataOneHot = this.getDataOneHot.bind(this); - this.getInputMetaOneHot = this.getInputMetaOneHot.bind(this); - this.createOneHotEncodings = this.createOneHotEncodings.bind(this); - // Saving / loading data - this.loadDataFromUrl = this.loadDataFromUrl.bind(this); - this.loadJSON = this.loadJSON.bind(this); - this.loadCSV = this.loadCSV.bind(this); - this.loadBlob = this.loadBlob.bind(this); - this.loadData = this.loadData.bind(this); - this.saveData = this.saveData.bind(this); - this.saveMeta = this.saveMeta.bind(this); - this.loadMeta = this.loadMeta.bind(this); - // data loading helpers - this.findEntries = this.findEntries.bind(this); - this.formatRawData = this.formatRawData.bind(this); - this.csvToJSON = this.csvToJSON.bind(this); } /** @@ -73,21 +36,20 @@ class NeuralNetworkData { * 2. getting the min and max from the data * 3. getting the oneHot encoded values * 4. 
getting the inputShape and outputUnits from the data - * @param {*} dataRaw - * @param {*} inputShape + * @param {Array} [inputShape] + * @void */ - createMetadata(dataRaw, inputShape = null) { + createMetadata(inputShape = null) { // get the data type for each property - this.getDTypesFromData(dataRaw); + this.getDTypesFromData(); // get the stats - min, max - this.getDataStats(dataRaw); + this.getDataStats(); // onehot encode - this.getDataOneHot(dataRaw); + this.getDataOneHot(); // calculate the input units from the data - this.getDataUnits(dataRaw, inputShape); + this.getDataUnits(inputShape); this.isMetadataReady = true; - return { ...this.meta }; } /* @@ -98,34 +60,22 @@ class NeuralNetworkData { /** * get stats about the data - * @param {*} dataRaw + * @private + * @void */ - getDataStats(dataRaw) { - const meta = Object.assign({}, this.meta); - - const inputMeta = this.getInputMetaStats(dataRaw, meta.inputs, "xs"); - const outputMeta = this.getInputMetaStats(dataRaw, meta.outputs, "ys"); - - meta.inputs = inputMeta; - meta.outputs = outputMeta; - - this.meta = { - ...this.meta, - ...meta, - }; - - return meta; + getDataStats() { + this.meta.inputs = this.getInputMetaStats(this.meta.inputs, "xs"); + this.meta.outputs = this.getInputMetaStats(this.meta.outputs, "ys"); } /** - * getRawStats * get back the min and max of each label - * @param {*} dataRaw - * @param {*} inputOrOutputMeta - * @param {*} xsOrYs + * @private + * @param {Object} inputOrOutputMeta + * @param {"xs" | "ys"} xsOrYs + * @return {Object} */ - // eslint-disable-next-line no-unused-vars, class-methods-use-this - getInputMetaStats(dataRaw, inputOrOutputMeta, xsOrYs) { + getInputMetaStats(inputOrOutputMeta, xsOrYs) { const inputMeta = Object.assign({}, inputOrOutputMeta); Object.keys(inputMeta).forEach((k) => { @@ -133,11 +83,11 @@ class NeuralNetworkData { inputMeta[k].min = 0; inputMeta[k].max = 1; } else if (inputMeta[k].dtype === "number") { - const dataAsArray = dataRaw.map((item) => item[xsOrYs][k]); + const dataAsArray = this.data.raw.map((item) => item[xsOrYs][k]); inputMeta[k].min = nnUtils.getMin(dataAsArray); inputMeta[k].max = nnUtils.getMax(dataAsArray); } else if (inputMeta[k].dtype === "array") { - const dataAsArray = dataRaw.map((item) => item[xsOrYs][k]).flat(); + const dataAsArray = this.data.raw.map((item) => item[xsOrYs][k]).flat(); inputMeta[k].min = nnUtils.getMin(dataAsArray); inputMeta[k].max = nnUtils.getMax(dataAsArray); } @@ -148,42 +98,29 @@ class NeuralNetworkData { /** * get the data units, inputshape and output units - * @param {*} dataRaw + * @private + * @param {Array} arrayShape + * @void */ - getDataUnits(dataRaw, _arrayShape = null) { - const arrayShape = _arrayShape !== null ? 
_arrayShape : undefined; - const meta = Object.assign({}, this.meta); - + getDataUnits(arrayShape = null) { // if the data has a shape pass it in - let inputShape; if (arrayShape) { - inputShape = arrayShape; + this.meta.inputUnits = arrayShape; } else { - inputShape = [this.getInputMetaUnits(dataRaw, meta.inputs)].flat(); + this.meta.inputUnits = [this.getInputMetaUnits(this.meta.inputs)].flat(); } - const outputShape = this.getInputMetaUnits(dataRaw, meta.outputs); - - meta.inputUnits = inputShape; - meta.outputUnits = outputShape; - - this.meta = { - ...this.meta, - ...meta, - }; - - return meta; + this.meta.outputUnits = this.getInputMetaUnits(this.meta.outputs); } /** - * get input - * @param {*} _inputsMeta - * @param {*} _dataRaw + * @private + * @param {Object} inputsMeta + * @return {number | Array} */ - // eslint-disable-next-line class-methods-use-this, no-unused-vars - getInputMetaUnits(_dataRaw, _inputsMeta) { + // eslint-disable-next-line class-methods-use-this + getInputMetaUnits(inputsMeta) { let units = 0; - const inputsMeta = Object.assign({}, _inputsMeta); Object.entries(inputsMeta).forEach((arr) => { const { dtype } = arr[1]; @@ -208,15 +145,17 @@ class NeuralNetworkData { * getDTypesFromData * gets the data types of the data we're using * important for handling oneHot + * @private + * @void - updates this.meta */ - getDTypesFromData(_dataRaw) { + getDTypesFromData() { const meta = { ...this.meta, inputs: {}, outputs: {}, }; - const sample = _dataRaw[0]; + const sample = this.data.raw[0]; const xs = Object.keys(sample.xs); const ys = Object.keys(sample.ys); @@ -236,8 +175,6 @@ class NeuralNetworkData { // otherwise throw an error this.meta = meta; - - return meta; } /** @@ -250,6 +187,7 @@ class NeuralNetworkData { * Add Data * @param {object} xInputObj, {key: value}, key must be the name of the property value must be a String, Number, or Array * @param {*} yInputObj, {key: value}, key must be the name of the property value must be a String, Number, or Array + * @void - updates this.data */ addData(xInputObj, yInputObj) { this.data.raw.push({ @@ -267,8 +205,9 @@ class NeuralNetworkData { /** * convertRawToTensors * converts array of {xs, ys} to tensors - * @param {*} _dataRaw - * @param {*} meta + * @param {*} dataRaw + * + * @return {{ inputs: tf.Tensor, outputs: tf.Tensor }} */ // eslint-disable-next-line class-methods-use-this, no-unused-vars convertRawToTensors(dataRaw) { @@ -323,13 +262,11 @@ class NeuralNetworkData { /** * normalize the dataRaw input - * @param {*} dataRaw + * @return {Array} */ - normalizeDataRaw(dataRaw) { - const meta = Object.assign({}, this.meta); - - const normXs = this.normalizeInputData(dataRaw, meta.inputs, "xs"); - const normYs = this.normalizeInputData(dataRaw, meta.outputs, "ys"); + normalizeDataRaw() { + const normXs = this.normalizeInputData(this.meta.inputs, "xs"); + const normYs = this.normalizeInputData(this.meta.outputs, "ys"); const normalizedData = nnUtils.zipArrays(normXs, normYs); @@ -337,13 +274,12 @@ class NeuralNetworkData { } /** - * normalizeRaws - * @param {*} dataRaw - * @param {*} inputOrOutputMeta - * @param {*} xsOrYs + * @param {Object} inputOrOutputMeta + * @param {"xs" | "ys"} xsOrYs + * @return {Array} */ - // eslint-disable-next-line no-unused-vars, class-methods-use-this - normalizeInputData(dataRaw, inputOrOutputMeta, xsOrYs) { + normalizeInputData(inputOrOutputMeta, xsOrYs) { + const dataRaw = this.data.raw; // the data length const dataLength = dataRaw.length; // the copy of the inputs.meta[inputOrOutput] 
@@ -471,13 +407,11 @@ class NeuralNetworkData { * applyOneHotEncodingsToDataRaw * does not set this.data.raws * but rather returns them - * @param {*} _dataRaw - * @param {*} _meta */ - applyOneHotEncodingsToDataRaw(dataRaw) { + applyOneHotEncodingsToDataRaw() { const meta = Object.assign({}, this.meta); - const output = dataRaw.map((row) => { + const output = this.data.raw.map((row) => { const xs = { ...row.xs, }; @@ -510,32 +444,21 @@ class NeuralNetworkData { * getDataOneHot * creates onehot encodings for the input and outputs * and adds them to the meta info - * @param {*} dataRaw + * @private + * @void */ - getDataOneHot(dataRaw) { - const meta = Object.assign({}, this.meta); - - const inputMeta = this.getInputMetaOneHot(dataRaw, meta.inputs, "xs"); - const outputMeta = this.getInputMetaOneHot(dataRaw, meta.outputs, "ys"); - - meta.inputs = inputMeta; - meta.outputs = outputMeta; - - this.meta = { - ...this.meta, - ...meta, - }; - - return meta; + getDataOneHot() { + this.meta.inputs = this.getInputMetaOneHot(this.meta.inputs, "xs"); + this.meta.outputs = this.getInputMetaOneHot(this.meta.outputs, "ys"); } /** * getOneHotMeta - * @param {*} _inputsMeta - * @param {*} _dataRaw - * @param {*} xsOrYs + * @param {Object} _inputsMeta + * @param {"xs" | "ys"} xsOrYs + * @return {Object} */ - getInputMetaOneHot(_dataRaw, _inputsMeta, xsOrYs) { + getInputMetaOneHot(_inputsMeta, xsOrYs) { const inputsMeta = Object.assign({}, _inputsMeta); Object.entries(inputsMeta).forEach((arr) => { @@ -546,7 +469,7 @@ class NeuralNetworkData { if (dtype === "string") { const uniqueVals = [ - ...new Set(_dataRaw.map((obj) => obj[xsOrYs][key])), + ...new Set(this.data.raw.map((obj) => obj[xsOrYs][key])), ]; const oneHotMeta = this.createOneHotEncodings(uniqueVals); inputsMeta[key] = { @@ -562,6 +485,9 @@ class NeuralNetworkData { /** * Returns a legend mapping the * data values to oneHot encoded values + * @private + * @param {Array} _uniqueValuesArray + * @return {Object} */ // eslint-disable-next-line class-methods-use-this, no-unused-vars createOneHotEncodings(_uniqueValuesArray) { @@ -602,22 +528,20 @@ class NeuralNetworkData { * @param {*} dataUrl * @param {*} inputs * @param {*} outputs + * @void */ async loadDataFromUrl(dataUrl, inputs, outputs) { try { - let result; if (dataUrl.endsWith(".csv")) { - result = await this.loadCSV(dataUrl, inputs, outputs); + await this.loadCSV(dataUrl, inputs, outputs); } else if (dataUrl.endsWith(".json")) { - result = await this.loadJSON(dataUrl, inputs, outputs); + await this.loadJSON(dataUrl, inputs, outputs); } else if (dataUrl.includes("blob")) { - result = await this.loadBlob(dataUrl, inputs, outputs); + await this.loadBlob(dataUrl, inputs, outputs); } else { throw new Error("Not a valid data format. 
Must be csv or json"); } - - return result; } catch (error) { console.error(error); throw new Error(error); @@ -626,9 +550,10 @@ class NeuralNetworkData { /** * loadJSON - * @param {*} _dataUrlOrJson - * @param {*} _inputLabelsArray - * @param {*} _outputLabelsArray + * @param {*} dataUrlOrJson + * @param {*} inputLabels + * @param {*} outputLabels + * @void */ async loadJSON(dataUrlOrJson, inputLabels, outputLabels) { try { @@ -642,8 +567,7 @@ class NeuralNetworkData { } // format the data.raw array - const result = this.formatRawData(json, inputLabels, outputLabels); - return result; + this.formatRawData(json, inputLabels, outputLabels); } catch (err) { console.error("error loading json"); throw new Error(err); @@ -652,9 +576,10 @@ class NeuralNetworkData { /** * loadCSV - * @param {*} _dataUrl - * @param {*} _inputLabelsArray - * @param {*} _outputLabelsArray + * @param {*} dataUrl + * @param {*} inputLabels + * @param {*} outputLabels + * @void */ async loadCSV(dataUrl, inputLabels, outputLabels) { try { @@ -664,8 +589,7 @@ class NeuralNetworkData { entries: loadedData, }; // format the data.raw array - const result = this.formatRawData(json, inputLabels, outputLabels); - return result; + this.formatRawData(json, inputLabels, outputLabels); } catch (err) { console.error("error loading csv", err); throw new Error(err); @@ -674,25 +598,23 @@ class NeuralNetworkData { /** * loadBlob - * @param {*} _dataUrlOrJson - * @param {*} _inputLabelsArray - * @param {*} _outputLabelsArray + * @param {*} dataUrlOrJson + * @param {*} inputLabels + * @param {*} outputLabels + * @void */ async loadBlob(dataUrlOrJson, inputLabels, outputLabels) { try { const { data } = await axios.get(dataUrlOrJson); const text = data; // await data.text(); - let result; if (nnUtils.isJsonOrString(text)) { const json = JSON.parse(text); - result = await this.loadJSON(json, inputLabels, outputLabels); + await this.loadJSON(json, inputLabels, outputLabels); } else { const json = this.csvToJSON(text); - result = await this.loadJSON(json, inputLabels, outputLabels); + await this.loadJSON(json, inputLabels, outputLabels); } - - return result; } catch (err) { console.log("mmm might be passing in a string or something!", err); throw new Error(err); @@ -873,8 +795,9 @@ class NeuralNetworkData { * formatRawData * takes a json and set the this.data.raw * @param {*} json - * @param {Array} inputLabels - * @param {Array} outputLabels + * @param {Array} inputLabels + * @param {Array} outputLabels + * @void */ formatRawData(json, inputLabels, outputLabels) { // Recurse through the json object to find @@ -914,8 +837,6 @@ class NeuralNetworkData { // set this.data.raw this.data.raw = result; - - return result; } /** diff --git a/src/NeuralNetwork/index.js b/src/NeuralNetwork/index.js index 00dfd420..d070e851 100644 --- a/src/NeuralNetwork/index.js +++ b/src/NeuralNetwork/index.js @@ -1,15 +1,12 @@ import * as tf from "@tensorflow/tfjs"; +import callCallback from "../utils/callcallback"; import handleArguments from "../utils/handleArguments"; +import { imgToPixelArray, isInstanceOfSupportedElement, } from "../utils/imageUtilities"; import NeuralNetwork from "./NeuralNetwork"; import NeuralNetworkData from "./NeuralNetworkData"; -import NeuralNetworkVis from "./NeuralNetworkVis"; -import callCallback from "../utils/callcallback"; import nnUtils from "./NeuralNetworkUtils"; -import { - imgToPixelArray, - isInstanceOfSupportedElement, -} from "../utils/imageUtilities"; +import NeuralNetworkVis from "./NeuralNetworkVis"; const DEFAULTS = { 
inputs: [], @@ -130,6 +127,8 @@ class DiyNeuralNetwork { /** * createLayersNoTraining + * @private + * @void */ createLayersNoTraining() { // Create sample data based on options @@ -145,8 +144,9 @@ class DiyNeuralNetwork { this.addData(inputSample, outputSample); } - this.neuralNetworkData.createMetadata(this.neuralNetworkData.data.raw); - this.addDefaultLayers(this.options.task, this.neuralNetworkData.meta); + // TODO: what about inputShape? + this.neuralNetworkData.createMetadata(); + this.addDefaultLayers(); } /** @@ -239,7 +239,7 @@ class DiyNeuralNetwork { async loadDataInternal(options) { const { dataUrl, inputs, outputs } = options; - const data = await this.neuralNetworkData.loadDataFromUrl( + await this.neuralNetworkData.loadDataFromUrl( dataUrl, inputs, outputs @@ -248,9 +248,9 @@ class DiyNeuralNetwork { // once the data are loaded, create the metadata // and prep the data for training // if the inputs are defined as an array of [img_width, img_height, channels] - this.createMetadata(data); + this.createMetaData(); - this.prepareForTraining(data); + this.prepareForTraining(); } /** @@ -259,7 +259,7 @@ class DiyNeuralNetwork { * //////////////////////////////////////////////////////////// */ - createMetaData(dataRaw) { + createMetaData() { const { inputs } = this.options; let inputShape; @@ -270,7 +270,7 @@ class DiyNeuralNetwork { : null; } - this.neuralNetworkData.createMetadata(dataRaw, inputShape); + this.neuralNetworkData.createMetadata(inputShape); } /** @@ -281,46 +281,36 @@ class DiyNeuralNetwork { /** * Prepare data for training by applying oneHot to raw - * @param {*} dataRaw + * @private + * @void */ - prepareForTraining(_dataRaw = null) { - const dataRaw = - _dataRaw === null ? this.neuralNetworkData.data.raw : _dataRaw; - const unnormalizedTrainingData = - this.neuralNetworkData.applyOneHotEncodingsToDataRaw(dataRaw); - this.data.training = unnormalizedTrainingData; + prepareForTraining() { + this.data.training = this.neuralNetworkData.applyOneHotEncodingsToDataRaw(); this.neuralNetworkData.isWarmedUp = true; - - return unnormalizedTrainingData; } /** * normalizeData - * @param {*} _dataRaw - * @param {*} _meta + * @public + * @void */ - normalizeData(_dataRaw = null) { - const dataRaw = - _dataRaw === null ? 
this.neuralNetworkData.data.raw : _dataRaw; - + normalizeData() { if (!this.neuralNetworkData.isMetadataReady) { // if the inputs are defined as an array of [img_width, img_height, channels] - this.createMetaData(dataRaw); + this.createMetaData(); } if (!this.neuralNetworkData.isWarmedUp) { - this.prepareForTraining(dataRaw); + this.prepareForTraining(); } - const trainingData = this.neuralNetworkData.normalizeDataRaw(dataRaw); + const trainingData = this.neuralNetworkData.normalizeDataRaw(); // set this equal to the training data this.data.training = trainingData; // set isNormalized to true this.neuralNetworkData.meta.isNormalized = true; - - return trainingData; } /** @@ -328,6 +318,7 @@ class DiyNeuralNetwork { * @param {*} value * @param {*} _key * @param {*} _meta + * @return {number} */ // eslint-disable-next-line class-methods-use-this normalizeInput(value, _key, _meta) { @@ -339,6 +330,7 @@ class DiyNeuralNetwork { /** * search though the xInputs and format for adding to data.raws * @param {*} input + * @return */ searchAndFormat(input) { let formattedInputs; @@ -358,6 +350,7 @@ class DiyNeuralNetwork { /** * Returns either the original input or a pixelArray[] * @param {*} input + * @return */ // eslint-disable-next-line class-methods-use-this formatInputItem(input) { @@ -388,15 +381,11 @@ class DiyNeuralNetwork { /** * convertTrainingDataToTensors - * @param {*} _trainingData - * @param {*} _meta + * @private + * @return {{ inputs: tf.Tensor, outputs: tf.Tensor }} */ - convertTrainingDataToTensors(_trainingData = null, _meta = null) { - const trainingData = - _trainingData === null ? this.data.training : _trainingData; - const meta = _meta === null ? this.neuralNetworkData.meta : _meta; - - return this.neuralNetworkData.convertRawToTensors(trainingData, meta); + convertTrainingDataToTensors() { + return this.neuralNetworkData.convertRawToTensors(this.data.training); } /** @@ -404,11 +393,14 @@ class DiyNeuralNetwork { * this means applying onehot or normalization * so that the user can use original data units rather * than having to normalize + * @private * @param {*} _input - * @param {*} meta - * @param {*} inputHeaders + * @return {Array} */ - formatInputsForPrediction(_input, meta, inputHeaders) { + formatInputsForPrediction(_input) { + const { meta } = this.neuralNetworkData; + const inputHeaders = Object.keys(meta.inputs); + let inputData = []; // TODO: check to see if it is a nested array @@ -437,26 +429,29 @@ class DiyNeuralNetwork { /** * formatInputsForPredictionAll + * @private * @param {*} _input - * @param {*} meta - * @param {*} inputHeaders + * @return {tf.Tensor} */ - formatInputsForPredictionAll(_input, meta, inputHeaders) { + formatInputsForPredictionAll(_input) { + const { meta } = this.neuralNetworkData; + const inputHeaders = Object.keys(meta.inputs); + let output; if (_input instanceof Array) { if (_input.every((item) => Array.isArray(item))) { output = _input.map((item) => { - return this.formatInputsForPrediction(item, meta, inputHeaders); + return this.formatInputsForPrediction(item); }); return tf.tensor(output, [_input.length, inputHeaders.length]); } - output = this.formatInputsForPrediction(_input, meta, inputHeaders); + output = this.formatInputsForPrediction(_input); return tf.tensor([output]); } - output = this.formatInputsForPrediction(_input, meta, inputHeaders); + output = this.formatInputsForPrediction(_input); return tf.tensor([output]); } @@ -491,6 +486,7 @@ class DiyNeuralNetwork { /** * train + * @public * @param {*} optionsOrCallback * 
@param {*} optionsOrWhileTraining * @param {*} callback @@ -572,12 +568,12 @@ class DiyNeuralNetwork { // if metadata needs to be generated about the data if (!this.neuralNetworkData.isMetadataReady) { // if the inputs are defined as an array of [img_width, img_height, channels] - this.createMetaData(this.neuralNetworkData.data.raw); + this.createMetaData(); } // if the data still need to be summarized, onehotencoded, etc if (!this.neuralNetworkData.isWarmedUp) { - this.prepareForTraining(this.neuralNetworkData.data.raw); + this.prepareForTraining(); } // if inputs and outputs are not specified @@ -592,19 +588,17 @@ class DiyNeuralNetwork { // check to see if layers are passed into the constructor // then use those to create your architecture if (!this.neuralNetwork.isLayered) { + // TODO: don't update this.options.layers - Linda this.options.layers = this.createNetworkLayers( - this.options.layers, - this.neuralNetworkData.meta + this.options.layers ); } // if the model does not have any layers defined yet // then use the default structure if (!this.neuralNetwork.isLayered) { - this.options.layers = this.addDefaultLayers( - this.options.task, - this.neuralNetworkData.meta - ); + // TODO: don't update this.options.layers - Linda + this.options.layers = this.addDefaultLayers(); } if (!this.neuralNetwork.isCompiled) { @@ -618,19 +612,22 @@ class DiyNeuralNetwork { /** * addLayer - * @param {*} options + * @param {tf.Layer} layer */ - addLayer(options) { - this.neuralNetwork.addLayer(options); + addLayer(layer) { + this.neuralNetwork.addLayer(layer); } /** * add custom layers in options + * @private + * @param {Array} layerJsonArray + * @returns // TODO: make void */ - createNetworkLayers(layerJsonArray, meta) { + createNetworkLayers(layerJsonArray) { const layers = [...layerJsonArray]; - const { inputUnits, outputUnits } = Object.assign({}, meta); + const { inputUnits, outputUnits } = this.neuralNetworkData.meta; const layersLength = layers.length; if (!(layers.length >= 2)) { @@ -687,10 +684,12 @@ class DiyNeuralNetwork { /** * addDefaultLayers - * @param {*} _task + * @private + * @returns // TODO: make void */ - addDefaultLayers(task, meta) { + addDefaultLayers() { let layers; + const task = this.options.task; switch (task.toLowerCase()) { // if the task is classification case "classification": @@ -706,7 +705,7 @@ class DiyNeuralNetwork { }, ]; - return this.createNetworkLayers(layers, meta); + return this.createNetworkLayers(layers); // if the task is regression case "regression": layers = [ @@ -720,7 +719,7 @@ class DiyNeuralNetwork { activation: "sigmoid", }, ]; - return this.createNetworkLayers(layers, meta); + return this.createNetworkLayers(layers); // if the task is imageClassification case "imageclassification": layers = [ @@ -759,7 +758,7 @@ class DiyNeuralNetwork { activation: "softmax", }, ]; - return this.createNetworkLayers(layers, meta); + return this.createNetworkLayers(layers); default: console.log("no imputUnits or outputUnits defined"); @@ -774,25 +773,21 @@ class DiyNeuralNetwork { activation: "sigmoid", }, ]; - return this.createNetworkLayers(layers, meta); + return this.createNetworkLayers(layers); } } /** * compile the model - * @param {*} _options + * @private + * @void */ - compile(_modelOptions = null, _learningRate = null) { - const LEARNING_RATE = - _learningRate === null ? 
this.options.learningRate : _learningRate; + compile() { + const LEARNING_RATE = this.options.learningRate; let options = {}; - if (_modelOptions !== null) { - options = { - ..._modelOptions, - }; - } else if ( + if ( this.options.task === "classification" || this.options.task === "imageClassification" ) { @@ -894,9 +889,8 @@ class DiyNeuralNetwork { */ predictSyncInternal(_input) { const { meta } = this.neuralNetworkData; - const headers = Object.keys(meta.inputs); - const inputData = this.formatInputsForPredictionAll(_input, meta, headers); + const inputData = this.formatInputsForPredictionAll(_input); const unformattedResults = this.neuralNetwork.predictSync(inputData); inputData.dispose(); @@ -954,9 +948,8 @@ class DiyNeuralNetwork { */ async predictInternal(_input) { const { meta } = this.neuralNetworkData; - const headers = Object.keys(meta.inputs); - const inputData = this.formatInputsForPredictionAll(_input, meta, headers); + const inputData = this.formatInputsForPredictionAll(_input); const unformattedResults = await this.neuralNetwork.predict(inputData); inputData.dispose(); @@ -1042,7 +1035,7 @@ class DiyNeuralNetwork { inputData = tf.tensor([inputData], [1, ...meta.inputUnits]); } else { - inputData = this.formatInputsForPredictionAll(_input, meta, headers); + inputData = this.formatInputsForPredictionAll(_input); } const unformattedResults = this.neuralNetwork.classifySync(inputData); @@ -1110,7 +1103,7 @@ class DiyNeuralNetwork { inputData = tf.tensor([inputData], [1, ...meta.inputUnits]); } else { - inputData = this.formatInputsForPredictionAll(_input, meta, headers); + inputData = this.formatInputsForPredictionAll(_input); } const unformattedResults = await this.neuralNetwork.classify(inputData); From 51846a370a0915219ca116b5cf74d6afc258ffdd Mon Sep 17 00:00:00 2001 From: Linda Paiste Date: Sat, 16 Mar 2024 20:56:50 -0500 Subject: [PATCH 3/3] Move task-dependent logic into a separate file. 
--- src/NeuralNetwork/NeuralNetwork.js | 34 +--- src/NeuralNetwork/getTask.js | 223 +++++++++++++++++++++ src/NeuralNetwork/index.js | 305 ++++++----------------------- 3 files changed, 284 insertions(+), 278 deletions(-) create mode 100644 src/NeuralNetwork/getTask.js diff --git a/src/NeuralNetwork/NeuralNetwork.js b/src/NeuralNetwork/NeuralNetwork.js index c55011a8..63ef5159 100644 --- a/src/NeuralNetwork/NeuralNetwork.js +++ b/src/NeuralNetwork/NeuralNetwork.js @@ -14,18 +14,6 @@ class NeuralNetwork { */ this.model = null; - // methods - this.init = this.init.bind(this); - this.createModel = this.createModel.bind(this); - this.addLayer = this.addLayer.bind(this); - this.compile = this.compile.bind(this); - this.setOptimizerFunction = this.setOptimizerFunction.bind(this); - this.train = this.train.bind(this); - this.predict = this.predict.bind(this); - this.classify = this.classify.bind(this); - this.save = this.save.bind(this); - this.load = this.load.bind(this); - // initialize this.init(); } @@ -56,7 +44,7 @@ class NeuralNetwork { /** * add layer to the model * if the model has 2 or more layers switch the isLayered flag - * @param {tf.Layer} layer + * @param {tf.layers.Layer} layer * @void */ addLayer(layer) { @@ -70,27 +58,17 @@ class NeuralNetwork { /** * Compile the model - * if the model is compiled, set the isCompiled flag to true - * @param {*} _modelOptions + * once the model is compiled, set the isCompiled flag to true + * @param {tf.ModelCompileArgs} compileOptions */ - compile(_modelOptions) { - this.model.compile(_modelOptions); + compile(compileOptions) { + this.model.compile(compileOptions); this.isCompiled = true; } - /** - * Set the optimizer function given the learning rate - * as a parameter - * @param {*} learningRate - * @param {*} optimizer - */ - setOptimizerFunction(learningRate, optimizer) { - return optimizer.call(this, learningRate); - } - /** * Train the model - * @param {Object} _options + * @param {tf.ModelFitArgs & { inputs: tf.Tensor, outputs: tf.Tensor, whileTraining: Array }} _options */ async train(_options) { const TRAINING_OPTIONS = _options; diff --git a/src/NeuralNetwork/getTask.js b/src/NeuralNetwork/getTask.js new file mode 100644 index 00000000..7f9a53ea --- /dev/null +++ b/src/NeuralNetwork/getTask.js @@ -0,0 +1,223 @@ +import * as tf from '@tensorflow/tfjs'; + +/** + * Separate all task-dependent logic into separate task objects to minimize if/else + * behavior in the main Neural Network class and make it easier to potentially add + * more tasks in the future. + * May want these to be classes which get the nn instance in the constructor. + */ + +/** + * @typedef {'classification' | 'regression' | 'imageClassification'} TaskName + */ + + +/** + * A LayerJson object contains the arguments of the tf.layers function + * and a `type` property with the name of the function. + * + * @typedef {Object} LayerJson + * @property {string} type + */ + +/** + * @typedef {Object} NNTask + * Defines all behavior which varies based on the task. + * + * @property {TaskName} name + * + * @property {() => Object} [getDefaultOptions] - Optional. + * Override any of the default neural network options with defaults that are + * specific to this task. + * + * @property {(learningRate: number) => tf.ModelCompileArgs} getCompileOptions + * This function will be called before compiling the model and should return + * the compile options for this task (optimizer, loss, and [optional] metrics). + * Receives the `learningRate` as an argument. 
+ * Note: learningRate is always the first arg of the optimizer, but some + * optimizers support other optional args as well + * + * @property {(inputShape: tf.Shape, hiddenUnits: number, outputUnits: number) => LayerJson[]} createLayers + * Function to create the standard layers for this task. + * Will receive the inputShape, hiddenUnits, and outputUnits from the neural network. + * + * @property {( + * inputs: number | string[] | number[], + * outputs: number | string[] + * ) => { xs: number[], ys: (string | number)[] }[] + * } getSampleData + * Function to create empty training data for use with neuro-evolution. + * Should return an array of objects with properties xs and ys. + * Receives the inputs and the outputs from the options of the neural network. + * + * TODO: parseInputs and parseOutputs + */ + +// TODO: move elsewhere +function isStringArray(value) { + return Array.isArray(value) && value.some(v => typeof v === 'string'); +} + +/** + * Handling of input sample is the same for all tasks. + * @param {number | string[] | number[]} inputs + * @return {number[]} + */ +function getSampleInput(inputs) { + if (isStringArray(inputs)) { + throw new Error(`'inputs' cannot be an array of property names when using option 'noTraining'. You must specify the number of inputs.`); + } + const inputSize = Array.isArray(inputs) ? inputs.reduce((a, b) => a * b) : inputs; + return new Array(inputSize).fill(0); +} + +/** + * @type NNTask + */ +const classificationTask = { + name: 'classification', + getCompileOptions(learningRate) { + return { + loss: 'categoricalCrossentropy', + optimizer: tf.train.sgd(learningRate), + metrics: ['accuracy'], + } + }, + createLayers(inputShape, hiddenUnits, outputUnits) { + return [ + { + type: 'dense', + units: hiddenUnits, + activation: 'relu', + inputShape + }, + { + type: 'dense', + activation: 'softmax', + units: outputUnits, + }, + ]; + }, + getSampleData(inputs, outputs) { + if (!isStringArray(outputs)) { + throw new Error(`Invalid outputs ${outputs}. 
Outputs must be an array of label names when using option 'noTraining' with task 'classification'.`); + } + const xs = getSampleInput(inputs); + return outputs.map(label => ({ xs, ys: [label] })); + } +} + +/** + * @type NNTask + */ +const imageClassificationTask = { + name: 'imageClassification', + getDefaultOptions() { + return { + learningRate: 0.02 + } + }, + getCompileOptions: classificationTask.getCompileOptions, + createLayers(inputShape, hiddenUnits, outputUnits) { + return [ + { + type: 'conv2d', + filters: 8, + kernelSize: 5, + strides: 1, + activation: 'relu', + kernelInitializer: 'varianceScaling', + inputShape, + }, + { + type: 'maxPooling2d', + poolSize: [2, 2], + strides: [2, 2], + }, + { + type: 'conv2d', + filters: 16, + kernelSize: 5, + strides: 1, + activation: 'relu', + kernelInitializer: 'varianceScaling', + }, + { + type: 'maxPooling2d', + poolSize: [2, 2], + strides: [2, 2], + }, + { + type: 'flatten', + }, + { + type: 'dense', + kernelInitializer: 'varianceScaling', + activation: 'softmax', + units: outputUnits, + }, + ]; + }, + getSampleData: classificationTask.getSampleData +} + +/** + * @type NNTask + */ +const regressionTask = { + name: 'regression', + getCompileOptions(learningRate) { + return { + loss: 'meanSquaredError', + optimizer: tf.train.adam(learningRate), + metrics: ['accuracy'], + }; + }, + createLayers(inputShape, hiddenUnits, outputUnits) { + return [ + { + type: 'dense', + units: hiddenUnits, + activation: 'relu', + inputShape + }, + { + type: 'dense', + activation: 'sigmoid', + units: outputUnits, + }, + ]; + }, + getSampleData(inputs, outputs) { + if (typeof outputs !== 'number') { + throw new Error(`Invalid outputs ${outputs}. Outputs must be a number when using option 'noTraining' with task 'regression'.`); + } + return [{ + xs: getSampleInput(inputs), + ys: new Array(outputs).fill(0) + }] + } +} + +/** + * Mapping of supported task configurations and their task names. + * Use lowercase keys to make the lookup case-insensitive. + */ +const TASKS = { + regression: regressionTask, + classification: classificationTask, + imageclassification: imageClassificationTask, +} + +/** + * Get the correct task object based on the task name. + * @param {TaskName | string} name + * @return {NNTask} + */ +export default function getTask(name) { + const task = TASKS[name.toLowerCase()]; + if (!task) { + throw new Error(`Unknown task name '${name}'. Task must be one of ${Object.keys(TASKS).join(', ')}`); + } + return task; +} diff --git a/src/NeuralNetwork/index.js b/src/NeuralNetwork/index.js index 6dc63416..82240380 100644 --- a/src/NeuralNetwork/index.js +++ b/src/NeuralNetwork/index.js @@ -2,6 +2,7 @@ import * as tf from "@tensorflow/tfjs"; import callCallback from "../utils/callcallback"; import handleArguments from "../utils/handleArguments"; import { imgToPixelArray, isInstanceOfSupportedElement, } from "../utils/imageUtilities"; +import getTask from './getTask'; import NeuralNetwork from "./NeuralNetwork"; import NeuralNetworkData from "./NeuralNetworkData"; @@ -20,20 +21,17 @@ const DEFAULTS = { hiddenUnits: 16, neuroEvolution: false, }; + class DiyNeuralNetwork { constructor(options, callback) { - // Is there a better way to handle a different - // default learning rate for image classification tasks? 
- if (options.task === "imageClassification") { - DEFAULTS.learningRate = 0.02; - } + this.task = getTask(options.task || 'regression'); - this.options = - { - ...DEFAULTS, - ...options, - } || DEFAULTS; + this.options = { + ...DEFAULTS, + ...this.task.getDefaultOptions?.(), + ...options, + } this.neuralNetwork = new NeuralNetwork(); this.neuralNetworkData = new NeuralNetworkData(); @@ -43,51 +41,19 @@ class DiyNeuralNetwork { training: [], }; - // Methods - this.init = this.init.bind(this); - // adding data + // Public Methods this.addData = this.addData.bind(this); - this.loadDataFromUrl = this.loadDataFromUrl.bind(this); - // metadata prep - this.createMetaData = this.createMetaData.bind(this); - // data prep and handling - this.prepareForTraining = this.prepareForTraining.bind(this); this.normalizeData = this.normalizeData.bind(this); - this.normalizeInput = this.normalizeInput.bind(this); - this.searchAndFormat = this.searchAndFormat.bind(this); - this.formatInputItem = this.formatInputItem.bind(this); - this.convertTrainingDataToTensors = - this.convertTrainingDataToTensors.bind(this); - this.formatInputsForPrediction = this.formatInputsForPrediction.bind(this); - this.formatInputsForPredictionAll = - this.formatInputsForPredictionAll.bind(this); - this.isOneHotEncodedOrNormalized = - this.isOneHotEncodedOrNormalized.bind(this); - // model prep this.train = this.train.bind(this); - this.trainInternal = this.trainInternal.bind(this); - this.addLayer = this.addLayer.bind(this); - this.createNetworkLayers = this.createNetworkLayers.bind(this); - this.addDefaultLayers = this.addDefaultLayers.bind(this); - this.compile = this.compile.bind(this); - // prediction / classification this.predict = this.predict.bind(this); this.predictMultiple = this.predictMultiple.bind(this); this.classify = this.classify.bind(this); this.classifyMultiple = this.classifyMultiple.bind(this); - this.predictInternal = this.predictInternal.bind(this); - this.classifyInternal = this.classifyInternal.bind(this); - // save / load data this.saveData = this.saveData.bind(this); this.loadData = this.loadData.bind(this); - // save / load model this.save = this.save.bind(this); this.load = this.load.bind(this); - - // release model this.dispose = this.dispose.bind(this); - - // neuroevolution this.mutate = this.mutate.bind(this); this.crossover = this.crossover.bind(this); @@ -111,10 +77,10 @@ class DiyNeuralNetwork { if (this.options.neuroEvolution === true) { this.createLayersNoTraining(); } - if (this.options.dataUrl) { await this.loadDataFromUrl(); - } else if (this.options.modelUrl) { + } + if (this.options.modelUrl) { // will take a URL to model.json, an object, or files array await this.load(this.options.modelUrl); } @@ -122,26 +88,21 @@ class DiyNeuralNetwork { } /** - * createLayersNoTraining - * @private + * Creates and adds layers for use with neuroEvolution. + * @private - called by init. 
* @void */ createLayersNoTraining() { // Create sample data based on options - const { inputs, outputs, task } = this.options; - if (task === "classification") { - for (let i = 0; i < outputs.length; i += 1) { - const inputSample = new Array(inputs).fill(0); - this.addData(inputSample, [outputs[i]]); - } - } else { - const inputSample = new Array(inputs).fill(0); - const outputSample = new Array(outputs).fill(0); - this.addData(inputSample, outputSample); - } + const { inputs, outputs } = this.options; + const data = this.task.getSampleData(inputs, outputs); + data.forEach(({ xs, ys }) => { + this.addData(xs, ys); + }); // TODO: what about inputShape? this.neuralNetworkData.createMetadata(); + // TODO: what if the user specifies options.layers? this.addDefaultLayers(); } @@ -577,18 +538,13 @@ class DiyNeuralNetwork { // check to see if layers are passed into the constructor // then use those to create your architecture + // or use the default layers for the task if (!this.neuralNetwork.isLayered) { - // TODO: don't update this.options.layers - Linda - this.options.layers = this.createNetworkLayers( - this.options.layers - ); - } - - // if the model does not have any layers defined yet - // then use the default structure - if (!this.neuralNetwork.isLayered) { - // TODO: don't update this.options.layers - Linda - this.options.layers = this.addDefaultLayers(); + if (this.options.layers && this.options.layers.length > 2) { + this.createNetworkLayers(this.options.layers); + } else { + this.addDefaultLayers(); + } } if (!this.neuralNetwork.isCompiled) { @@ -601,206 +557,55 @@ class DiyNeuralNetwork { } /** - * addLayer - * @param {tf.Layer} layer + * @private - called by createNetworkLayers and addDefaultLayers + * Replace all layers of the model + * @param {Array} layers - array of JSON objects */ - addLayer(layer) { - this.neuralNetwork.addLayer(layer); + setLayers(layers) { + layers.forEach(layer => { + this.neuralNetwork.addLayer(tf.layers[layer.type](layer)); + }); } /** - * add custom layers in options - * @private - * @param {Array} layerJsonArray - * @returns // TODO: make void + * Create and custom layers from the user's options. + * Modifies the first and last layers to add the input and output shape, + * if not already set, without mutating the original objects. + * @private - called during training + * @param {Array} layerJsonArray + * @void */ createNetworkLayers(layerJsonArray) { - const layers = [...layerJsonArray]; - const { inputUnits, outputUnits } = this.neuralNetworkData.meta; - const layersLength = layers.length; - - if (!(layers.length >= 2)) { - return false; - } - - // set the inputShape - layers[0].inputShape = layers[0].inputShape - ? layers[0].inputShape - : inputUnits; - // set the output units - const lastIndex = layersLength - 1; - const lastLayer = layers[lastIndex]; - lastLayer.units = lastLayer.units ? lastLayer.units : outputUnits; - - layers.forEach((layer) => { - this.addLayer(tf.layers[layer.type](layer)); - }); - - return layers; + const first = layerJsonArray[0]; + const last = layerJsonArray[layerJsonArray.length - 1]; + const layers = [ + first.inputShape ? first : { ...first, inputShape: inputUnits }, + ...layerJsonArray.slice(1,-1), + last.units ? 
last : { ...last, units: outputUnits } + ]; + this.setLayers(layers); } - // /** - // * createDenseLayer - // * @param {*} _options - // */ - // // eslint-disable-next-line class-methods-use-this - // createDenseLayer(_options) { - // const options = Object.assign({}, { - // units: this.options.hiddenUnits, - // activation: 'relu', - // ..._options - // }); - // return tf.layers.dense(options); - // } - - // /** - // * createConv2dLayer - // * @param {*} _options - // */ - // // eslint-disable-next-line class-methods-use-this - // createConv2dLayer(_options) { - // const options = Object.assign({}, { - // kernelSize: 5, - // filters: 8, - // strides: 1, - // activation: 'relu', - // kernelInitializer: 'varianceScaling', - // ..._options - // }) - - // return tf.layers.conv2d(options); - // } - /** - * addDefaultLayers - * @private - * @returns // TODO: make void + * @private -- called by train and createLayersNoTraining + * Create and add the standard layers for the current task. + * @void */ addDefaultLayers() { - let layers; - const task = this.options.task; - switch (task.toLowerCase()) { - // if the task is classification - case "classification": - layers = [ - { - type: "dense", - units: this.options.hiddenUnits, - activation: "relu", - }, - { - type: "dense", - activation: "softmax", - }, - ]; - - return this.createNetworkLayers(layers); - // if the task is regression - case "regression": - layers = [ - { - type: "dense", - units: this.options.hiddenUnits, - activation: "relu", - }, - { - type: "dense", - activation: "sigmoid", - }, - ]; - return this.createNetworkLayers(layers); - // if the task is imageClassification - case "imageclassification": - layers = [ - { - type: "conv2d", - filters: 8, - kernelSize: 5, - strides: 1, - activation: "relu", - kernelInitializer: "varianceScaling", - }, - { - type: "maxPooling2d", - poolSize: [2, 2], - strides: [2, 2], - }, - { - type: "conv2d", - filters: 16, - kernelSize: 5, - strides: 1, - activation: "relu", - kernelInitializer: "varianceScaling", - }, - { - type: "maxPooling2d", - poolSize: [2, 2], - strides: [2, 2], - }, - { - type: "flatten", - }, - { - type: "dense", - kernelInitializer: "varianceScaling", - activation: "softmax", - }, - ]; - return this.createNetworkLayers(layers); - - default: - console.log("no imputUnits or outputUnits defined"); - layers = [ - { - type: "dense", - units: this.options.hiddenUnits, - activation: "relu", - }, - { - type: "dense", - activation: "sigmoid", - }, - ]; - return this.createNetworkLayers(layers); - } + const { inputUnits, outputUnits } = this.neuralNetworkData.meta; + const { hiddenUnits } = this.options; + const layers = this.task.createLayers(inputUnits, hiddenUnits, outputUnits); + this.setLayers(layers); } /** * compile the model - * @private + * @private - called during training * @void */ compile() { - const LEARNING_RATE = this.options.learningRate; - - let options = {}; - - if ( - this.options.task === "classification" || - this.options.task === "imageClassification" - ) { - options = { - loss: "categoricalCrossentropy", - optimizer: tf.train.sgd, - metrics: ["accuracy"], - }; - } else if (this.options.task === "regression") { - options = { - loss: "meanSquaredError", - optimizer: tf.train.adam, - metrics: ["accuracy"], - }; - } - - options.optimizer = options.optimizer - ? 
this.neuralNetwork.setOptimizerFunction( - LEARNING_RATE, - options.optimizer - ) - : this.neuralNetwork.setOptimizerFunction(LEARNING_RATE, tf.train.sgd); - + const options = this.task.getCompileOptions(this.options.learningRate); this.neuralNetwork.compile(options); // if debug mode is true, then show the model summary