Skip to content

Instantly share code, notes, and snippets.

@padenot
Last active March 19, 2025 14:51
Show Gist options
  • Save padenot/c4ca3491a08c28a58c0a515eeae9916a to your computer and use it in GitHub Desktop.
onnx.webidl
/* -*- Mode: IDL; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/.
*
* The origin of this IDL file is the ONNX Runtime JavaScript API:
* https://onnxruntime.ai/docs/api/js/
*
* Copyright © 2012 W3C® (MIT, ERCIM, Keio), All Rights Reserved. W3C
* liability, trademark and document use rules apply.
*/
// Data Type Mapping
// Maps each ONNX tensor element type name to the JS container used to hold
// its data. Keys here mirror the `type` strings accepted by Tensor.
dictionary TensorDataTypeMap {
Float32Array float32;
Uint8Array uint8;
Int8Array int8;
Uint16Array uint16;
Int16Array int16;
Int32Array int32;
BigInt64Array int64;
// String tensors carry their elements as a plain sequence, not a typed array.
sequence<DOMString> string;
// Booleans are stored one per byte.
Uint8Array bool;
// No Float16Array here; raw IEEE-754 half-precision bit patterns are kept
// in 16-bit unsigned slots.
Uint16Array float16;
Float64Array float64;
Uint32Array uint32;
BigUint64Array uint64;
// NOTE(review): presumably 4-bit elements are packed two per byte, as in
// ONNX Runtime — confirm the packing convention.
Uint8Array uint4;
Int8Array int4;
};
// GPU and ML Types
// Minimal GPUBuffer-like handle exposed to chrome code for gpu-buffer-backed
// tensors (see TensorDataLocation "gpu-buffer").
[Exposed=(DedicatedWorker,Window), ChromeOnly]
interface GpuBuffer {
// Size of the buffer in bytes.
readonly attribute unsigned long size;
// Current mapping state of the buffer.
readonly attribute DOMString mapState; // "unmapped" | "pending" | "mapped"
};
// Handle for a WebNN-style ML tensor backing a Tensor whose location is
// "ml-tensor".
[Exposed=(DedicatedWorker,Window), ChromeOnly]
interface MLTensor {
// Releases the underlying tensor resource.
undefined destroy();
};
// ONNX Value
// Aliases matching the ONNX Runtime JS naming: an OnnxValue is currently
// always a Tensor, and its storage location is a TensorDataLocation.
typedef Tensor OnnxValue;
typedef TensorDataLocation OnnxValueDataLocation;
// Tensor Representation
// A multi-dimensional value passed into and out of an InferenceSession.
// Which data-bearing attribute is meaningful depends on `location`.
[Exposed=(DedicatedWorker,Window), ChromeOnly]
interface Tensor {
// Shape of the tensor, one entry per dimension.
[Cached, Pure]
readonly attribute sequence<long> dims;
// Element type name; one of the keys of TensorDataTypeMap (e.g. "float32").
readonly attribute DOMString type;
// CPU-side data view. NOTE(review): presumably only valid when `location`
// is "cpu" or "cpu-pinned" — confirm.
readonly attribute any data;
// Where the tensor's data currently lives.
readonly attribute TensorDataLocation location;
// Device-side handles. NOTE(review): each likely only valid when
// `location` matches ("texture" / "gpu-buffer" / "ml-tensor"); consider
// whether these should be nullable — confirm with the implementation.
readonly attribute WebGLTexture texture;
readonly attribute GpuBuffer gpuBuffer;
readonly attribute MLTensor mlTensor;
// Resolves with the tensor data on the CPU. NOTE(review): `releaseData`
// presumably releases the underlying storage after the copy — confirm.
Promise<any> getData(optional boolean releaseData);
// Releases the tensor's underlying data.
undefined dispose();
};
// Tensor Data Location
// Where a Tensor's data currently resides.
enum TensorDataLocation {
"none",
"cpu",
"cpu-pinned",
"texture",
"gpu-buffer",
"ml-tensor"
};
// Execution Provider Config
// Opaque per-provider configuration; either a provider name string or an
// options object in the ONNX Runtime JS API, hence `any` here.
typedef any ExecutionProviderConfig;
// Tensor Element Type Mapping
// Maps each ONNX element type name to the scalar type of a single element
// (companion to TensorDataTypeMap, which maps to the bulk container).
dictionary TensorElementTypeMap {
double float32;
octet uint8;
byte int8;
unsigned short uint16;
short int16;
long int32;
long long int64;
DOMString string;
boolean bool;
// JS has no half-precision scalar; float16 elements surface as double.
double float16;
double float64;
unsigned long uint32;
unsigned long long uint64;
// 4-bit elements surface as the corresponding 16-bit scalar type.
unsigned short uint4;
short int4;
};
// Input/Output types
// Feeds map input names to values; fetches either list output names or map
// them to preallocated (possibly null) values; run() resolves with a
// name-to-value map.
typedef record<DOMString, OnnxValue> InferenceSessionOnnxValueMapType;
typedef record<DOMString, OnnxValue?> InferenceSessionNullableOnnxValueMapType;
typedef InferenceSessionOnnxValueMapType InferenceSessionFeedsType;
typedef (sequence<DOMString> or InferenceSessionNullableOnnxValueMapType) InferenceSessionFetchesType;
typedef InferenceSessionOnnxValueMapType InferenceSessionReturnType;
// Session Options
// Per-call options for InferenceSession.run().
dictionary InferenceSessionRunOptions {
// Log severity: 0=verbose, 1=info, 2=warning, 3=error, 4=fatal.
unsigned short logSeverityLevel = 0; // 0 - 4
unsigned long logVerbosityLevel = 0;
// When true, all incomplete runs are terminated as soon as possible.
// Fixed: the default was `true`, which would flag every run for
// termination; ONNX Runtime's RunOptions defaults this to false.
boolean terminate = false;
// Tag used to identify this run in logs.
DOMString tag = "";
// record<DOMString, any> extra = {};
};
// Options supplied when creating an InferenceSession, mirroring ONNX
// Runtime's SessionOptions.
dictionary InferenceSessionSessionOptions {
// Ordered list of execution providers to try (name string or options
// object per provider).
required sequence<ExecutionProviderConfig> executionProviders;
unsigned long intraOpNumThreads = 1;
unsigned long interOpNumThreads = 1;
// Fixes named free dimensions of the model to concrete sizes.
record<DOMString, unsigned long> freeDimensionOverrides;
// NOTE(review): ONNX Runtime's native default is "all" — confirm "basic"
// is intended here.
DOMString graphOptimizationLevel = "basic"; // "disabled" | "basic" | "extended" | "all"
boolean enableCpuMemArena = true;
boolean enableMemPattern = true;
// NOTE(review): ONNX Runtime's native default is "sequential" — confirm
// "parallel" is intended here.
DOMString executionMode = "parallel"; // "sequential" | "parallel"
// When non-empty, the optimized model is serialized to this path.
DOMString optimizedModelFilePath = "";
boolean enableProfiling = false;
DOMString profileFilePrefix = "";
DOMString logId = "";
// Log severity: 0=verbose, 1=info, 2=warning, 3=error, 4=fatal.
unsigned short logSeverityLevel = 0; // 0 - 4
unsigned long logVerbosityLevel = 0;
// Preferred location for outputs: a single location for all outputs, or a
// per-output-name map.
(OnnxValueDataLocation or record<DOMString, OnnxValueDataLocation>) preferredOutputLocation;
boolean enableGraphCapture = false;
// Provider/backend-specific extras not modeled above.
record<DOMString, any> extra;
};
// A loaded model that can be run repeatedly with different feeds.
[Exposed=(DedicatedWorker,Window), ChromeOnly]
interface InferenceSession {
// Runs inference with the given named inputs; resolves with a map of
// output name to output tensor.
Promise<InferenceSessionReturnType> run(InferenceSessionFeedsType feeds,
optional InferenceSessionRunOptions options = {});
// Overload with explicit fetches, currently disabled:
// Promise<InferenceSessionReturnType> run(InferenceSessionFeedsType feeds, InferenceSessionFetchesType fetches, optional InferenceSessionRunOptions options);
// Releases the session and its resources.
Promise<undefined> release();
undefined startProfiling();
undefined endProfiling();
// Names of the model's inputs, in model order.
[Cached, Pure]
readonly attribute sequence<DOMString> inputNames;
// Names of the model's outputs, in model order.
[Cached, Pure]
readonly attribute sequence<DOMString> outputNames;
};
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment