Created
April 23, 2019 11:23
-
-
Save Wheest/56d41fa210cfe7bc97ffd6d8f4173db3 to your computer and use it in GitHub Desktop.
TVM ONNX import error when importing a PyTorch model exported via ONNX.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| { | |
| "cells": [ | |
| { | |
| "cell_type": "code", | |
| "execution_count": 1, | |
| "metadata": {}, | |
| "outputs": [], | |
| "source": [ | |
| "import torch \n", | |
| "import torch.nn as nn\n", | |
| "import torchvision\n", | |
| "import torchvision.transforms as transforms\n", | |
| "from torch.autograd import Variable\n", | |
| "import numpy as np\n", | |
| "import torch.onnx\n", | |
| "\n", | |
| "import tvm\n", | |
| "import tvm.relay as relay\n", | |
| "from matplotlib import pyplot as plt\n", | |
| "from tvm.contrib import graph_runtime\n", | |
| "import onnx" | |
| ] | |
| }, | |
| { | |
| "cell_type": "markdown", | |
| "metadata": {}, | |
| "source": [ | |
| "### Generate PyTorch model" | |
| ] | |
| }, | |
| { | |
| "cell_type": "code", | |
| "execution_count": 2, | |
| "metadata": {}, | |
| "outputs": [], | |
| "source": [ | |
| "class ConvNet(nn.Module):\n", | |
| " def __init__(self):\n", | |
| " super(ConvNet, self).__init__()\n", | |
| " self.layer1 = nn.Sequential(\n", | |
| " nn.Conv2d(in_c, num_filters, kernel_size=kdim, \n", | |
| " stride=stride, padding=padding, bias=False)) \n", | |
| " def forward(self, x):\n", | |
| " out = self.layer1(x)\n", | |
| " return out" | |
| ] | |
| }, | |
| { | |
| "cell_type": "code", | |
| "execution_count": 3, | |
| "metadata": {}, | |
| "outputs": [], | |
| "source": [ | |
| "# set hyperparams\n", | |
| "in_c = 512\n", | |
| "num_filters = 512\n", | |
| "in_h, in_w = 28, 28\n", | |
| "kdim = 3\n", | |
| "groups=1\n", | |
| "stride=1\n", | |
| "padding=0\n", | |
| "in_shape = (1, in_c, in_h, in_w)\n", | |
| "\n", | |
| "test_input = Variable(torch.randn(1, in_c, in_h, in_w))\n" | |
| ] | |
| }, | |
| { | |
| "cell_type": "code", | |
| "execution_count": 4, | |
| "metadata": {}, | |
| "outputs": [], | |
| "source": [ | |
| "# create model, and export to onnx\n", | |
| "save_path = '/tmp/'\n", | |
| "fname = 'mymodel'\n", | |
| "\n", | |
| "save_name = save_path + fname + '.onnx'\n", | |
| "pytorch_model = ConvNet()\n", | |
| "torch.onnx.export(pytorch_model, test_input, save_name)\n" | |
| ] | |
| }, | |
| { | |
| "cell_type": "markdown", | |
| "metadata": {}, | |
| "source": [ | |
| "### Load Model into tvm" | |
| ] | |
| }, | |
| { | |
| "cell_type": "code", | |
| "execution_count": 5, | |
| "metadata": {}, | |
| "outputs": [ | |
| { | |
| "name": "stdout", | |
| "output_type": "stream", | |
| "text": [ | |
| "graph torch-jit-export (\n", | |
| " %0[FLOAT, 1x512x28x28]\n", | |
| ") initializers (\n", | |
| " %1[FLOAT, 512x512x3x3]\n", | |
| ") {\n", | |
| " %2 = Conv[dilations = [1, 1], group = 1, kernel_shape = [3, 3], pads = [0, 0, 0, 0], strides = [1, 1]](%0, %1)\n", | |
| " return %2\n", | |
| "}\n" | |
| ] | |
| } | |
| ], | |
| "source": [ | |
| "onnx_model = onnx.load(save_name)\n", | |
| "# Check that the IR is well formed\n", | |
| "onnx.checker.check_model(onnx_model)\n", | |
| "# Print a human readable representation of the graph\n", | |
| "print(onnx.helper.printable_graph(onnx_model.graph))\n" | |
| ] | |
| }, | |
| { | |
| "cell_type": "code", | |
| "execution_count": 6, | |
| "metadata": {}, | |
| "outputs": [], | |
| "source": [ | |
| "# create relay dummy input\n", | |
| "test_input = np.random.rand(1, in_c, in_h, in_w)\n", | |
| "data = relay.var(\"data\", relay.TensorType(in_shape, \n", | |
| " \"float32\"))\n", | |
| "dtype = 'float32'\n", | |
| "\n", | |
| "x = test_input\n", | |
| "ctx = tvm.cpu()\n" | |
| ] | |
| }, | |
| { | |
| "cell_type": "code", | |
| "execution_count": 7, | |
| "metadata": {}, | |
| "outputs": [], | |
| "source": [ | |
| "# compile with relay\n", | |
| "\n", | |
| "input_name = '1'\n", | |
| "shape_dict = {input_name: x.shape}\n", | |
| "target = 'llvm'\n", | |
| "\n", | |
| "sym, params = relay.frontend.from_onnx(onnx_model, shape_dict)\n", | |
| "\n", | |
| "with relay.build_config(opt_level=1):\n", | |
| " intrps = relay.build_module.create_executor('graph', sym, \n", | |
| " tvm.cpu(0), target)" | |
| ] | |
| }, | |
| { | |
| "cell_type": "code", | |
| "execution_count": 8, | |
| "metadata": {}, | |
| "outputs": [ | |
| { | |
| "ename": "TVMError", | |
| "evalue": "Traceback (most recent call last):\n [bt] (8) /home/wheest/tools/tvm/build/libtvm.so(TVMFuncCall+0x4a) [0x7f3eb62dd95a]\n [bt] (7) /home/wheest/tools/tvm/build/libtvm.so(+0x71b094) [0x7f3eb60b1094]\n [bt] (6) /home/wheest/tools/tvm/build/libtvm.so(tvm::relay::InferType(tvm::relay::Expr const&, tvm::relay::Module const&)+0x2b4) [0x7f3eb60b0634]\n [bt] (5) /home/wheest/tools/tvm/build/libtvm.so(+0x4bd479) [0x7f3eb5e53479]\n [bt] (4) /home/wheest/tools/tvm/build/libtvm.so(+0x4bbf53) [0x7f3eb5e51f53]\n [bt] (3) /home/wheest/tools/tvm/build/libtvm.so(tvm::relay::InferType(tvm::relay::Function const&, tvm::relay::Module const&, tvm::relay::GlobalVar const&)+0x1a2) [0x7f3eb60b0b32]\n [bt] (2) /home/wheest/tools/tvm/build/libtvm.so(+0x71a003) [0x7f3eb60b0003]\n [bt] (1) /home/wheest/tools/tvm/build/libtvm.so(+0x48ca1f) [0x7f3eb5e22a1f]\n [bt] (0) /home/wheest/tools/tvm/build/libtvm.so(+0x10d852) [0x7f3eb5aa3852]\n [bt] (8) /home/wheest/tools/tvm/build/libtvm.so(+0x719fe7) [0x7f3eb60affe7]\n [bt] (7) /home/wheest/tools/tvm/build/libtvm.so(+0x73373b) [0x7f3eb60c973b]\n [bt] (6) /home/wheest/tools/tvm/build/libtvm.so(+0x736d5d) [0x7f3eb60ccd5d]\n [bt] (5) /home/wheest/tools/tvm/build/libtvm.so(+0x5089ad) [0x7f3eb5e9e9ad]\n [bt] (4) /home/wheest/tools/tvm/build/libtvm.so(+0x508a51) [0x7f3eb5e9ea51]\n [bt] (3) /home/wheest/tools/tvm/build/libtvm.so(+0x5249a2) [0x7f3eb5eba9a2]\n [bt] (2) /home/wheest/tools/tvm/build/libtvm.so(tvm::BijectiveLayout::ForwardShape(tvm::Array<HalideIR::Expr, void> const&) const+0xbd) [0x7f3eb5c0359d]\n [bt] (1) /home/wheest/tools/tvm/build/libtvm.so(+0x26e7f7) [0x7f3eb5c047f7]\n [bt] (0) /home/wheest/tools/tvm/build/libtvm.so(+0x10d852) [0x7f3eb5aa3852]\n File \"/home/wheest/tools/tvm/src/relay/ir/error.cc\", line 131\nTVMError: \u001b[1m\nError(s) have occurred. 
We have annotated the program with them:\n\n\u001b[0m\u001b[1mIn `main`: \n\u001b[0mv0.0.1\n%1 = fn (%v0: float32, %v1: Tensor[(512, 512, 3, 3), float32]) {\n %0 = nn.conv2d(%v0, %v1, kernel_size=[3, 3]) // \u001b[31man internal invariant was violdated while typechecking your program [12:20:33] /home/wheest/tools/tvm/src/lang/data_layout.cc:273: Check failed: src_shape.size() == src_axis.size() (0 vs. 4) : \n; \u001b[39m\n %0\n}\n%1\n", | |
| "output_type": "error", | |
| "traceback": [ | |
| "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", | |
| "\u001b[0;31mTVMError\u001b[0m Traceback (most recent call last)", | |
| "\u001b[0;32m<ipython-input-8-258695ce744b>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;31m# execute in tvm\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0mtvm_output\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mintrps\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mevaluate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msym\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0minput_data\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mparams\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0masnumpy\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", | |
| "\u001b[0;32m~/tools/tvm/python/tvm/relay/backend/interpreter.py\u001b[0m in \u001b[0;36mevaluate\u001b[0;34m(self, expr, binds)\u001b[0m\n\u001b[1;32m 225\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 226\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mexpr\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mFunction\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mGlobalVar\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 227\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_make_executor\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mexpr\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 228\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 229\u001b[0m \u001b[0;31m# normal expression evaluated by running a function.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", | |
| "\u001b[0;32m~/tools/tvm/python/tvm/relay/build_module.py\u001b[0m in \u001b[0;36m_make_executor\u001b[0;34m(self, func)\u001b[0m\n\u001b[1;32m 428\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 429\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m_make_executor\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfunc\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 430\u001b[0;31m \u001b[0mgraph_json\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmod\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mparams\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mbuild\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfunc\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtarget\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtarget\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 431\u001b[0m \u001b[0mgmodule\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_graph_rt\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcreate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mgraph_json\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmod\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mctx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 432\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mparams\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", | |
| "\u001b[0;32m~/tools/tvm/python/tvm/relay/build_module.py\u001b[0m in \u001b[0;36mbuild\u001b[0;34m(func, target, target_host, params)\u001b[0m\n\u001b[1;32m 290\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 291\u001b[0m \u001b[0;32mwith\u001b[0m \u001b[0mtophub_context\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 292\u001b[0;31m \u001b[0mfunc\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0moptimize\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfunc\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtarget\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mparams\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 293\u001b[0m \u001b[0;31m# Annotate the ops for heterogeneous execution.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 294\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtarget\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdict\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", | |
| "\u001b[0;32m~/tools/tvm/python/tvm/relay/build_module.py\u001b[0m in \u001b[0;36moptimize\u001b[0;34m(func, target, params)\u001b[0m\n\u001b[1;32m 177\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 178\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mcfg\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpass_enabled\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"SimplifyInference\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 179\u001b[0;31m \u001b[0mfunc\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mir_pass\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0minfer_type\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfunc\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 180\u001b[0m \u001b[0mfunc\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mir_pass\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msimplify_inference\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfunc\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 181\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", | |
| "\u001b[0;32m~/tools/tvm/python/tvm/relay/ir_pass.py\u001b[0m in \u001b[0;36minfer_type\u001b[0;34m(expr, mod)\u001b[0m\n\u001b[1;32m 367\u001b[0m \u001b[0mThe\u001b[0m \u001b[0mchecked\u001b[0m \u001b[0mexpression\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 368\u001b[0m \"\"\"\n\u001b[0;32m--> 369\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0m_ir_pass\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0minfer_type\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mexpr\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmod\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 370\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 371\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", | |
| "\u001b[0;32m~/tools/tvm/python/tvm/_ffi/_ctypes/function.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, *args)\u001b[0m\n\u001b[1;32m 204\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mhandle\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvalues\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtcodes\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mctypes\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mc_int\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mnum_args\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 205\u001b[0m ctypes.byref(ret_val), ctypes.byref(ret_tcode)) != 0:\n\u001b[0;32m--> 206\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mget_last_ffi_error\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 207\u001b[0m \u001b[0m_\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtemp_args\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 208\u001b[0m \u001b[0m_\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0margs\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", | |
| "\u001b[0;31mTVMError\u001b[0m: Traceback (most recent call last):\n [bt] (8) /home/wheest/tools/tvm/build/libtvm.so(TVMFuncCall+0x4a) [0x7f3eb62dd95a]\n [bt] (7) /home/wheest/tools/tvm/build/libtvm.so(+0x71b094) [0x7f3eb60b1094]\n [bt] (6) /home/wheest/tools/tvm/build/libtvm.so(tvm::relay::InferType(tvm::relay::Expr const&, tvm::relay::Module const&)+0x2b4) [0x7f3eb60b0634]\n [bt] (5) /home/wheest/tools/tvm/build/libtvm.so(+0x4bd479) [0x7f3eb5e53479]\n [bt] (4) /home/wheest/tools/tvm/build/libtvm.so(+0x4bbf53) [0x7f3eb5e51f53]\n [bt] (3) /home/wheest/tools/tvm/build/libtvm.so(tvm::relay::InferType(tvm::relay::Function const&, tvm::relay::Module const&, tvm::relay::GlobalVar const&)+0x1a2) [0x7f3eb60b0b32]\n [bt] (2) /home/wheest/tools/tvm/build/libtvm.so(+0x71a003) [0x7f3eb60b0003]\n [bt] (1) /home/wheest/tools/tvm/build/libtvm.so(+0x48ca1f) [0x7f3eb5e22a1f]\n [bt] (0) /home/wheest/tools/tvm/build/libtvm.so(+0x10d852) [0x7f3eb5aa3852]\n [bt] (8) /home/wheest/tools/tvm/build/libtvm.so(+0x719fe7) [0x7f3eb60affe7]\n [bt] (7) /home/wheest/tools/tvm/build/libtvm.so(+0x73373b) [0x7f3eb60c973b]\n [bt] (6) /home/wheest/tools/tvm/build/libtvm.so(+0x736d5d) [0x7f3eb60ccd5d]\n [bt] (5) /home/wheest/tools/tvm/build/libtvm.so(+0x5089ad) [0x7f3eb5e9e9ad]\n [bt] (4) /home/wheest/tools/tvm/build/libtvm.so(+0x508a51) [0x7f3eb5e9ea51]\n [bt] (3) /home/wheest/tools/tvm/build/libtvm.so(+0x5249a2) [0x7f3eb5eba9a2]\n [bt] (2) /home/wheest/tools/tvm/build/libtvm.so(tvm::BijectiveLayout::ForwardShape(tvm::Array<HalideIR::Expr, void> const&) const+0xbd) [0x7f3eb5c0359d]\n [bt] (1) /home/wheest/tools/tvm/build/libtvm.so(+0x26e7f7) [0x7f3eb5c047f7]\n [bt] (0) /home/wheest/tools/tvm/build/libtvm.so(+0x10d852) [0x7f3eb5aa3852]\n File \"/home/wheest/tools/tvm/src/relay/ir/error.cc\", line 131\nTVMError: \u001b[1m\nError(s) have occurred. 
We have annotated the program with them:\n\n\u001b[0m\u001b[1mIn `main`: \n\u001b[0mv0.0.1\n%1 = fn (%v0: float32, %v1: Tensor[(512, 512, 3, 3), float32]) {\n %0 = nn.conv2d(%v0, %v1, kernel_size=[3, 3]) // \u001b[31man internal invariant was violdated while typechecking your program [12:20:33] /home/wheest/tools/tvm/src/lang/data_layout.cc:273: Check failed: src_shape.size() == src_axis.size() (0 vs. 4) : \n; \u001b[39m\n %0\n}\n%1\n" | |
| ] | |
| } | |
| ], | |
| "source": [ | |
| "# execute in tvm\n", | |
| "tvm_output = intrps.evaluate(sym)(input_data, **params).asnumpy()\n" | |
| ] | |
| } | |
| ], | |
| "metadata": { | |
| "kernelspec": { | |
| "display_name": "meth", | |
| "language": "python", | |
| "name": "meth" | |
| }, | |
| "language_info": { | |
| "codemirror_mode": { | |
| "name": "ipython", | |
| "version": 3 | |
| }, | |
| "file_extension": ".py", | |
| "mimetype": "text/x-python", | |
| "name": "python", | |
| "nbconvert_exporter": "python", | |
| "pygments_lexer": "ipython3", | |
| "version": "3.5.3" | |
| } | |
| }, | |
| "nbformat": 4, | |
| "nbformat_minor": 2 | |
| } |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment