From 9676ea25a850530a411454ab4c6b8e6ceea88788 Mon Sep 17 00:00:00 2001 From: Sudeep Agarwal Date: Wed, 10 May 2023 23:33:35 -0400 Subject: [PATCH 1/3] Update usage of call_tir --- 4_Build_End_to_End_Model.ipynb | 391 ++++++++++++++++++++------------- 1 file changed, 241 insertions(+), 150 deletions(-) diff --git a/4_Build_End_to_End_Model.ipynb b/4_Build_End_to_End_Model.ipynb index feade76..aefd3ac 100644 --- a/4_Build_End_to_End_Model.ipynb +++ b/4_Build_End_to_End_Model.ipynb @@ -1,6 +1,7 @@ { "cells": [ { + "attachments": {}, "cell_type": "markdown", "metadata": { "colab_type": "text", @@ -11,6 +12,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "Mpn1ti5Urdsv" @@ -20,6 +22,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "qXysoqn-vZuF" @@ -65,6 +68,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "i-14C4skxIrJ" @@ -75,6 +79,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "EpXZsatqxnyz" @@ -84,6 +89,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "8k7AtYC0x0jD" @@ -97,6 +103,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "BBIuE2jc1DaU" @@ -110,7 +117,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 11, "metadata": { "id": "BVp0fHyRkYj6" }, @@ -126,6 +133,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "wIIsUXGqpRqV" @@ -139,7 +147,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 55, "metadata": { "id": "NdWS5Jabq-DN" }, @@ -162,6 +170,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "acOCCvmSPaR0" @@ -172,7 +181,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 56, "metadata": { "colab": { "base_uri": "https://localhost:8080/", @@ -183,18 +192,20 @@ }, "outputs": [ { - "name": "stdout", - "output_type": "stream", - "text": [ - "Class: Sandal\n" - ] + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAesAAAGiCAYAAADHpO4FAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAAskklEQVR4nO3df3SU1b3v8c8kkAlIEgwxvyBAQCtaIHhAYopSPOQQoIuWylkX0SWQRXFpEy+Q5RHTAgH1mBYrzbKNsGqLtGuJIr2CrXrjoSnBwzLINTaLcpdEiFCiMOGHlwSCJDjz3D8oU6ckMM88M5nnYd4v116LPNl79s7jhC/fvfc822UYhiEAAGBbcdEeAAAAuDqCNQAANkewBgDA5gjWAADYHMEaAACbI1gDAGBzBGsAAGyOYA0AgM0RrAEAsDmCNQAANkewBgDAhPfee0+zZs1Sdna2XC6Xtm/ffs02dXV1+pd/+Re53W7dfPPN2rRpk6k+CdYAAJjQ0dGhvLw8VVdXB1X/8OHD+s53vqN7771XjY2NWrp0qX7wgx/o3XffDbpPFwd5AAAQGpfLpW3btmn27Nk91lm+fLnefvtt7d+/33/t/vvv15kzZ1RTUxNUP32sDjTcfD6fjh07pqSkJLlcrmgPBwBgkmEYOnv2rLKzsxUXF7kJ3AsXLqirq8vy6xiGcUW8cbvdcrvdll9bkurr61VYWBhwraioSEuXLg36NWwXrI8dO6acnJxoDwMAYFFLS4uGDBkSkde+cOGCcocNkOeE1/JrDRgwQOfOnQu4VlFRodWrV1t+bUnyeDzKyMgIuJaRkaH29nZ9+eWX6tev3zVfw3bBOikpSZJ0t2aqj/pGeTQAALO+0kXt1jv+v88joaurS54TXh1uGKbkpNCz9/azPuWO/5taWlqUnJzsvx6urDpcbBesL09F9FFf9XERrAHAcf6+E6o3ljKTk+IsBWv/6yQnBwTrcMrMzFRra2vAtdbWViUnJweVVUsR3A1eXV2t4cOHKzExUfn5+dq7d2+kugIAxCiv4bNcIq2goEC1tbUB13bs2KGCgoKgXyMiwXrLli0qKytTRUWFPvroI+Xl5amoqEgnTpyIRHcAgBjlk2G5mHXu3Dk1NjaqsbFR0qWPZjU2Nuro0aOSpPLycs2fP99f/5FHHtGnn36qJ554QgcOHNCLL76o119/XcuWLQu6z4gE63Xr1mnx4sUqLi7W7bffrg0bNqh///7auHHjFXU7OzvV3t4eUAAACIYvDP+Z9eGHH+qOO+7QHXfcIUkqKyvTHXfcoVWrVkmSjh8/7g/ckpSbm6u3335bO3bsUF5enp5//nn9+te/VlFRUdB9hn3NuqurSw0NDSovL/dfi4uLU2Fhoerr66+oX1lZqTVr1oR7GAAARMSUKVN0tUeUdPd0silTpugvf/lLyH2GPbM+deqUvF5vt9vUPR7PFfXLy8vV1tbmLy0tLeEeEgDgOuU1DMvFCaK+GzycHzwHAMSWUNedv97eCcKeWaelpSk+Pr7bbeqZmZnh7g4AgOte2IN1QkKCxo8fH7BN3efzqba21tQ2dQAArsUnQ14LxSmZdUSmwcvKyrRgwQJNmDBBEydOVFVVlTo6OlRcXByJ7gAAMSpWpsEjEqznzp2rkydPatWqVfJ4PBo3bpxqamqu2HQGAACuLWIbzEpLS1VaWhqplwcAwPKObnaDAwAQYb6/FyvtnSByB40CAICwILMGADjW5V3dVto7AcEaAOBYXuNSsdLeCQjWAADHYs0aAADYApk1AMCxfHLJK5el9k5AsAYAOJbPuFSstHcCpsEBALA5MmsAgGN5LU6DW2nbmwjWAADHipVgzTQ4AAA2R2YNAHAsn+GSz7CwG9xC295EsAYAOBbT4AAAwBbIrAEAjuVVnLwW8k5vGMcSSQRrAIBjGRbXrA3WrAEAiCzWrAEAgC2QWQMAHMtrxMlrWFizdsizwQnWAADH8skln4VJYp+cEa2ZBgcAwObIrAEAjhUrG8wI1gAAx7K+Zs00OAAACAMyawCAY13aYGbhIA+mwQEAiCyfxceNshscAACEBZk1AMCxYmWDGcEaAOBYPsXFxENRCNYAAMfyGi55LZycZaVtb2LNGgAAmyOzBgA4ltfibnAv0+AAAESWz4iTz8IGM59DNpgxDQ4AgM2RWQMAHItpcAAAbM4nazu6feEbSkQxDQ4AgM2RWQMAHMv6Q1GckbMSrAEAjmX9caPOCNbOGCUAADGMzBoA4FicZw0AgM3FyjQ4wRoA4FjWP2ftjGDtjFECABDDyKwBAI7lM1zyWXkoikOOyCRYAwAcy2dxGtwpn7N2xigBAIhhZNYAAMeyfkSmM3JWgjUAwLG8cslr4bPSVtr2Jmf8kwIAgBhGZg0AcCymwQEAsDmvrE1le8M3lIhyxj8pAACIYWTWAADHipVp8LCPcvXq1XK5XAFl1KhR4e4GAAD/QR5WihNEZJTf/OY3dfz4cX/ZvXt3JLoBAMQ44+9HZIZajBDXu6urqzV8+HAlJiYqPz9fe/fuvWr9qqoq3XrrrerXr59ycnK0bNkyXbhwIej+IjIN3qdPH2VmZgZVt7OzU52dnf6v29vbIzEkAADCYsuWLSorK9OGDRuUn5+vqqoqFRUVqampSenp6VfU37x5s5588klt3LhR3/rWt/TJJ59o4cKFcrlcWrduXVB9RiSzPnjwoLKzszVixAg9+OCDOnr0aI91KysrlZKS4i85OTmRGBIA4DoUjWnwdevWafHixSouLtbtt9+uDRs2qH///tq4cWO39d9//31NmjRJDzzwgIYPH65p06Zp3rx518zGvy7swTo/P1+bNm1STU2N1q9fr8OHD+uee+7R2bNnu61fXl6utrY2f2lpaQn3kAAA16nLp25ZKdKlWd2vl6/P+H5dV1eXGhoaVFhY6L8WFxenwsJC1dfXd9vmW9/6lhoaGvzB+dNPP9U777yjmTNnBv1zhn0afMaMGf4/jx07Vvn5+Ro2bJhef/11LVq06Ir6brdbbrc73MMAACBo/zyrW1FRodWrV19R79SpU/J6vcrIyAi4npGRoQMHDnT72g888IBOnTqlu+++W4Zh6KuvvtIjjzyiH/3oR0GPL+If3Ro4cKC+8Y1v6NChQ5HuCgAQY7wWj8i83LalpUXJycn+6+FMIuvq6vTss8/qxRdfVH5+vg4dOqQlS5bo6aef1sqVK4N6jYgH63Pnzqm5uVkPPfRQpLsCAMSYr09lh9pekpKTkwOCdU/S0tIUHx+v1tbWgOutra09bqxeuXKlHnroIf3gBz+QJI0ZM0YdHR16+OGH9eMf/1hxcdf+x0bY16wff/xx7dq1S0eOHNH777+v73//+4qPj9e8efPC3RUAAL0qISFB48ePV21trf+az+dTbW2tCgoKum1z/vz5KwJyfHy8JMkwjKD6DXtm/dlnn2nevHk6ffq0brrpJt19993as2ePbrrppnB3BQCIcT7FyWch7wylbVlZmRYsWKAJEyZo4sSJqqqqUkdHh4qLiyVJ8+fP1+DBg1VZWSlJmj
VrltatW6c77rjDPw2+cuVKzZo1yx+0ryXswfq1114L90sCANAtr+GS18I0eCht586dq5MnT2rVqlXyeDwaN26campq/JvOjh49GpBJr1ixQi6XSytWrNDnn3+um266SbNmzdJ//ud/Bt2nywg2B+8l7e3tSklJ0RR9T31cfaM9HACASV8ZF1WnN9XW1hbUOnAoLseKR//7PrkHhB4rOs9d1Pp73ojoWMOBgzwAAI4Vrg1mdkewBgA4lmHx1C3DIQd5EKwBAI7llUveEA/juNzeCZzxTwoAAGIYmTUAwLF8hrV1Z5+ttlj3jGANAHAsn8U1aytte5MzRgkAQAwjswYAOJZPLvksbBKz0rY3EawBAI4VjSeYRQPT4AAA2ByZNQDAsWJlgxnBGgDgWD5ZfNyoQ9asnfFPCgAAYhiZNQDAsQyLu8ENh2TWBGsAgGNx6hYAADYXKxvMnDFKAABiGJk1AMCxmAYHAMDmYuVxo0yDAwBgc2TWAADHYhocAACbi5VgzTQ4AAA2R2YNAHCsWMmsCdYAAMeKlWDNNDgAADZHZg0AcCxD1j4rbYRvKBFFsAYAOFasTIMTrAEAjhUrwZo1awAAbI7MGgDgWLGSWROsAQCOFSvBmmlwAABsjswaAOBYhuGSYSE7ttK2NxGsAQCOxXnWAADAFsisAQCOFSsbzAjWAADHipU1a6bBAQCwOTJrAIBjMQ0OAIDNxco0OMEaAOBYhsXM2inBmjVrAABsjswaAOBYhiTDsNbeCQjWAADH8sklF08wAwAA0UZmDQBwLHaDAwBgcz7DJVcMfM6aaXAAAGyOzBoA4FiGYXE3uEO2gxOsAQCOFStr1kyDAwBgc2TWAADHipXMmmANAHAsdoP34L333tOsWbOUnZ0tl8ul7du3B3zfMAytWrVKWVlZ6tevnwoLC3Xw4MFwjRcAAL/LG8ysFCcwHaw7OjqUl5en6urqbr+/du1avfDCC9qwYYM++OAD3XDDDSoqKtKFCxcsDxYAgFhkehp8xowZmjFjRrffMwxDVVVVWrFihb73ve9Jkn73u98pIyND27dv1/33339Fm87OTnV2dvq/bm9vNzskAECMupQdW1mzDuNgIiisu8EPHz4sj8ejwsJC/7WUlBTl5+ervr6+2zaVlZVKSUnxl5ycnHAOCQBwHbu8wcxKcYKwBmuPxyNJysjICLiekZHh/94/Ky8vV1tbm7+0tLSEc0gAADhe1HeDu91uud3uaA8DAOBAhqydSe2QWfDwZtaZmZmSpNbW1oDrra2t/u8BABAuTIOHIDc3V5mZmaqtrfVfa29v1wcffKCCgoJwdgUAQMwwPQ1+7tw5HTp0yP/14cOH1djYqNTUVA0dOlRLly7VM888o1tuuUW5ublauXKlsrOzNXv27HCOGwCAmJkHNx2sP/zwQ917773+r8vKyiRJCxYs0KZNm/TEE0+oo6NDDz/8sM6cOaO7775bNTU1SkxMNNeRy3WpBMsp++8jLP7GG023OfbQbSH11ee8+XvuC2GXhC/B/DTV4Df+Zr4jSV999nlI7XqFmd8Hq/h9glNYncoOsW11dbWee+45eTwe5eXl6Re/+IUmTpzYY/0zZ87oxz/+sd544w198cUXGjZsmKqqqjRz5syg+jP9V+eUKVNkXOUX2eVy6amnntJTTz1l9qUBADAlGkdkbtmyRWVlZdqwYYPy8/NVVVWloqIiNTU1KT09/Yr6XV1d+rd/+zelp6fr97//vQYPHqy//e1vGjhwYNB9Rn03OAAATrJu3TotXrxYxcXFkqQNGzbo7bff1saNG/Xkk09eUX/jxo364osv9P7776tv376SpOHDh5vqkyMyAQCOFa7d4O3t7QHl60/W/Lquri41NDQEPPwrLi5OhYWFPT786w9/+IMKCgpUUlKijIwMjR49Ws8++6y8Xm/QPyfBGgDgXIbLepGUk5MT8DTNysrKbrs7deqUvF6vqYd/ffrpp/r9738vr9erd955RytXrtTzzz+vZ555Jugfk2lwAEDMa2lpUXJysv/rcD6sy+fzKT09Xb/61a8UHx+v8ePH6/PPP9dzzz2nioqKoF6DYA0AcKxwbTBLTk4OCNY9SUtLU3x8vKmHf2VlZalv376Kj4/3X7vtttvk8XjU1dWlhISEa/bLNDgAwLmMMBQTEhISNH78+ICHf/l8PtXW1vb48K9Jkybp0KFD8vl8/muffPKJsrKyggrUEsEaAABTysrK9NJLL+m3v/2tPv74Yz366KPq6Ojw7w6fP3++ysvL/fUfffRRffHFF1qyZIk++eQTvf3223r22WdVUlISdJ9MgwMAHMvq871DaTt37lydPHlSq1atksfj0bhx41RTU+PfdHb06FHFxf0jF87JydG7776rZcuWaezYsRo8eLCWLFmi5cuXB90nwRoA4GxReOBeaWmpSktLu/1eXV3dFdcKCgq0Z8+ekPtjGhwAAJsjswYAOFY0psGjgWANAHAuTt2KLld8vFyu+GtX9DcwP6NvfHXRdBvp0tjM9/VVSH2Ztfqj/zLd5okf3hxSXzc0nTTdpvLPr5lu8+nFNNNtVrrnm24jSUP+69qfs7zCQfMnfPnOnzffT2+ehBVn/j0uw3ftOuEQwn1w9Qntr7re+r0NSainsF13J6q5/l6stLc/1qwBALA522bWAABcE9PgAADYXIwEa6bBAQCwOTJrAIBzfe2Yy5DbOwDBGgDgWOE6dcvumAYHAMDmyKwBAM4VIxvMCNYAAOeKkTVrpsEBALA5MmsAgGO5jEvFSnsnIFgDAJyLNevoMr76SkaoD6qPsN56uP+QPQNMt/nxp/eZbuP+3//HdBtJCuUuPDnjIdNtTt5l/iCPnP8+brqNJB0vyjLd5sLsG023GXjQ/KEXN35o/uAU7yfNpttIknze0NrZVK8eyBHK31shHETkiuu9vx8Nr9n3g6v3giBr1gAAwA5sm1kDAHBNTIMDAGBzMRKsmQYHAMDmyKwBAM4VI5k1wRoA4FzsBgcAAHZAZg0AcCyeYAYAgN3FyJo10+AAANgcwRoAAJtjGhwA4FguWVyzDttIIsu2wdp1x21yxbuDrn8xJdF0H76+of1v6vfBQdNtXP36mW6zbsg2023+x5AC0216k/dj8/cuNYQ2oR5DkX7ocIgtzfF9+w7TbTpuHWS6TVyu+UNGJClx98em2/g6OkLq67pjhBA5DPPvWMP8WTC9J5R7EHJffHQLAADYgG0zawAArilGdoMTrAEAzhUjwZppcAAAbI7MGgDgWDzBDAAAu2MaHAAA2AGZNQDAuWIksyZYAwAcK1bWrJkGBwDA5sisAQDOFSOPGyVYAwCcizXr6DryP+MU1z8+6PrP/MvvTffx9P/9juk2knRhQa7pNq4QFkbG/ddjptvoZfP95GR/Yb4fSf36XDTdxhfCGTfu+K9MtxnQt9N0G0nyhfCv7BPnk0y3eSDnXdNtDn6ZYbpNXIgLckPdp023KRnYYrpNzfngD+u57Iy3v+k2e86NNN1Gkv76/7JNtznzpflDhdrOmv+ZvGf7mm4jSfEDz
P8+Zf2vBFP1v7p4QfrDm6b7CQVr1gAAwBZsm1kDAHBNTIMDAGBzFqfBnRKsTU+Dv/fee5o1a5ays7Plcrm0ffv2gO8vXLhQLpcroEyfPj1c4wUAIOaYDtYdHR3Ky8tTdXV1j3WmT5+u48eP+8urr75qaZAAAHTLCENxANPT4DNmzNCMGTOuWsftdiszMzOo1+vs7FRn5z927ra3t5sdEgAgVsXImnVEdoPX1dUpPT1dt956qx599FGdPt3zx0AqKyuVkpLiLzk5OZEYEgAAjhX2YD19+nT97ne/U21trX76059q165dmjFjhrxeb7f1y8vL1dbW5i8tLeY/qwkAiE2XP2dtpThB2HeD33///f4/jxkzRmPHjtXIkSNVV1enqVOnXlHf7XbL7Tb/YAQAAGJFxB+KMmLECKWlpenQoUOR7goAgOtSxD9n/dlnn+n06dPKysqKdFcAgFgTIxvMTAfrc+fOBWTJhw8fVmNjo1JTU5Wamqo1a9Zozpw5yszMVHNzs5544gndfPPNKioqCuvAAQCIlWeDmw7WH374oe69917/12VlZZKkBQsWaP369dq3b59++9vf6syZM8rOzta0adP09NNPm16XHlb8f9XHFfyD6n/0s3mmXl+Sls1823QbSbqn/0HTbUI5IKLDMP+g/oFx5g+wOOm9wXQbSeowzD3cX5I8FweabtMWwsENrReTTbeRpC+95u95Uh/z97zm5DdNt7kQwthCed9J0keG+U9l/OF4nuk2N4Rw4EpKwgXz/cR3mW4jSfdl/8V0m0SX+QNu4l0+023iZL6NJN0QZ/5eVNY9aKq+t6uXj51wSMC1wnSwnjJligyj5zvz7rvmTxMCAAA949ngAADnYs0aAAB7i5U1a86zBgDA5sisAQDOxTQ4AAD2xjQ4AACwBYI1AMC5onSedXV1tYYPH67ExETl5+dr7969QbV77bXX5HK5NHv2bFP9EawBAM4VhWC9ZcsWlZWVqaKiQh999JHy8vJUVFSkEydOXLXdkSNH9Pjjj+uee+4x3SfBGgAQ89rb2wNKZ2fPT9dbt26dFi9erOLiYt1+++3asGGD+vfvr40bN/bYxuv16sEHH9SaNWs0YsQI0+MjWAMAHCtc51nn5OQoJSXFXyorK7vtr6urSw0NDSosLPRfi4uLU2Fhoerr63sc51NPPaX09HQtWrQopJ+T3eAAAOcK00e3WlpalJz8jzMFejrP4tSpU/J6vcrIyAi4npGRoQMHDnTbZvfu3frNb36jxsbGkIdJsAYAOFeYgnVycnJAsA6Xs2fP6qGHHtJLL72ktLS0kF/nugnWIx/fY7rNHx4fFFJff1Bo7eAE5k9MCq3NuRDa9J7eWh/r6KU2oTrI77okaZB6nt7tzldGKL8TzpCWlqb4+Hi1trYGXG9tbVVmZuYV9Zubm3XkyBHNmjXLf83nu3RiWp8+fdTU1KSRI0des1/WrAEAjhWuNetgJSQkaPz48aqtrfVf8/l8qq2tVUFBwRX1R40apb/+9a9qbGz0l+9+97u699571djYqJyc4I6jvW4yawBADIrC40bLysq0YMECTZgwQRMnTlRVVZU6OjpUXFwsSZo/f74GDx6syspKJSYmavTo0QHtBw4cKElXXL8agjUAACbMnTtXJ0+e1KpVq+TxeDRu3DjV1NT4N50dPXpUcXHhnbgmWAMAHCtazwYvLS1VaWlpt9+rq6u7attNmzaZ7o9gDQBwrhg5dYsNZgAA2ByZNQDAuWIksyZYAwAcy/X3YqW9EzANDgCAzZFZAwCci2lwAADsLVof3eptBGsAgHPFSGbNmjUAADZHZg0AcDaHZMdWEKwBAI4VK2vWTIMDAGBzZNYAAOeKkQ1mBGsAgGMxDQ4AAGyBzBoA4FxMgwMAYG9MgwMAAFsgswYAOBfT4AAA2BzBGgAAe2PNGgAA2AKZNQDAuZgGBwDA3lyGIZcResS10rY3MQ0OAIDNkVkDAJyLaXAAAOyN3eAAAMAWyKwBAM7FNDgAAPbGNDgAALAFMmsAgHMxDQ4AgL3FyjQ4wRoA4FwxklmzZg0AgM2RWQMAHM0pU9lWEKwBAM5lGJeKlfYOwDQ4AAA2ZypYV1ZW6s4771RSUpLS09M1e/ZsNTU1BdS5cOGCSkpKNGjQIA0YMEBz5sxRa2trWAcNAID0j93gVooTmArWu3btUklJifbs2aMdO3bo4sWLmjZtmjo6Ovx1li1bpj/+8Y/aunWrdu3apWPHjum+++4L+8ABAPDvBrdSHMDUmnVNTU3A15s2bVJ6eroaGho0efJktbW16Te/+Y02b96sf/3Xf5Ukvfzyy7rtttu0Z88e3XXXXVe8Zmdnpzo7O/1ft7e3h/JzAABw3bK0Zt3W1iZJSk1NlSQ1NDTo4sWLKiws9NcZNWqUhg4dqvr6+m5fo7KyUikpKf6Sk5NjZUgAgBji8lkvThBysPb5fFq6dKkmTZqk0aNHS5I8Ho8SEhI0cODAgLoZGRnyeDzdvk55ebna2tr8paWlJdQhAQBiDdPgV1dSUqL9+/dr9+7dlgbgdrvldrstvQYAANezkDLr0tJSvfXWW9q5c6eGDBniv56Zmamuri6dOXMmoH5ra6syMzMtDRQAgH/GbvBuGIah0tJSbdu2TX/+85+Vm5sb8P3x48erb9++qq2t9V9ramrS0aNHVVBQEJ4RAwBw2eWHolgpDmBqGrykpESbN2/Wm2++qaSkJP86dEpKivr166eUlBQtWrRIZWVlSk1NVXJysh577DEVFBR0uxMcAAArOHWrG+vXr5ckTZkyJeD6yy+/rIULF0qSfv7znysuLk5z5sxRZ2enioqK9OKLL4ZlsAAAxCJTwdoIYrogMTFR1dXVqq6uDnlQAAAEJUaOyOQgDwCAY8XKNDgHeQAAYHNk1gAA54qRIzIJ1gAAx2IaHAAA2AKZNQDAudgNDgCAvTENDgAAbIHMGgDgXD7jUrHS3gEI1gAA52LNGgAAe3PJ4pp12EYSWaxZAwBgc2TWAADn4glmAADYGx/dAgAA3aqurtbw4cOVmJio/Px87d27t8e6L730ku655x7deOONuvHGG1VYWHjV+t0hWAMAnMsIQzFpy5YtKisrU0VFhT766CPl5eWpqKhIJ06c6LZ+XV2d5s2bp507d6q+vl45OTmaNm2aPv/886D7JFgDABzLZRiWiyS1t7cHlM7Ozh77XLdunRYvXqzi4mLdfvvt2rBhg/r376+NGzd2W/+VV17RD3/4Q40bN06jRo3Sr3/9a/l8PtXW1gb9cxKsAQAxLycnRykpKf5SWVnZbb2uri41NDSosLDQfy0uLk6FhYWqr68Pqq/z58/r4sWLSk1NDXp8bDADADiX7+/FSntJLS0tSk5O9l92u93dVj916pS8Xq8yMjICrmdkZOjAgQNBdbl8+XJlZ2cHBPxrIVgDABzr61PZobaXpOTk5IBgHSk/+clP9Nprr6murk6JiYlBtyNYAwAQpLS0NMXHx6u1tTXgemtrqzIzM6/a9mc/+5l+8pOf6E9/+pPGjh1rql/WrAEAztXL
u8ETEhI0fvz4gM1hlzeLFRQU9Nhu7dq1evrpp1VTU6MJEyaY61Rk1gAAJ4vCE8zKysq0YMECTZgwQRMnTlRVVZU6OjpUXFwsSZo/f74GDx7s36T205/+VKtWrdLmzZs1fPhweTweSdKAAQM0YMCAoPokWAMAHCsaTzCbO3euTp48qVWrVsnj8WjcuHGqqanxbzo7evSo4uL+MXG9fv16dXV16d///d8DXqeiokKrV68Oqk+CNQAAJpWWlqq0tLTb79XV1QV8feTIEcv9EawBAM7FQR4AANiby3epWGnvBOwGBwDA5sisAQDOxTQ4AAA2F+LJWQHtHYBpcAAAbI7MGgDgWOF6NrjdEawBAM4VI2vWTIMDAGBzZNYAAOcyZO08a2ck1gRrAIBzsWYNAIDdGbK4Zh22kUQUa9YAANgcmTUAwLliZDc4wRoA4Fw+SS6L7R2AaXAAAGyOzBoA4FjsBgcAwO5iZM2aaXAAAGyOzBoA4FwxklkTrAEAzhUjwZppcAAAbI7MGgDgXDHyOWuCNQDAsfjoFgAAdseaNQAAsAMyawCAc/kMyWUhO/Y5I7MmWAMAnItpcAAAYAdk1gAAB7OYWes6zKwrKyt15513KikpSenp6Zo9e7aampoC6kyZMkUulyugPPLII2EdNAAAkv4xDW6lOICpYL1r1y6VlJRoz5492rFjhy5evKhp06apo6MjoN7ixYt1/Phxf1m7dm1YBw0AQCwxNQ1eU1MT8PWmTZuUnp6uhoYGTZ482X+9f//+yszMDOo1Ozs71dnZ6f+6vb3dzJAAALHMZ8jSVLZDdoNb2mDW1tYmSUpNTQ24/sorrygtLU2jR49WeXm5zp8/3+NrVFZWKiUlxV9ycnKsDAkAEEsMn/XiACFvMPP5fFq6dKkmTZqk0aNH+68/8MADGjZsmLKzs7Vv3z4tX75cTU1NeuONN7p9nfLycpWVlfm/bm9vJ2ADAPA1IQfrkpIS7d+/X7t37w64/vDDD/v/PGbMGGVlZWnq1Klqbm7WyJEjr3gdt9stt9sd6jAAALGMz1n3rLS0VG+99ZZ27typIUOGXLVufn6+JOnQoUOhdAUAQM98hvXiAKYya8Mw9Nhjj2nbtm2qq6tTbm7uNds0NjZKkrKyskIaIAAAPYqRzNpUsC4pKdHmzZv15ptvKikpSR6PR5KUkpKifv36qbm5WZs3b9bMmTM1aNAg7du3T8uWLdPkyZM1duzYiPwAAABc70wF6/Xr10u69OCTr3v55Ze1cOFCJSQk6E9/+pOqqqrU0dGhnJwczZkzRytWrAjbgAEA8DNkMbMO20giyvQ0+NXk5ORo165dlgYEAEDQYmQanIM8AACwOQ7yAAA4l88nycKDTXzX+UNRAACIOqbBAQCAHZBZAwCcK0Yya4I1AMC5OHULAADYAZk1AMCxDMMnw8Ixl1ba9iaCNQDAuQyLh3GwZg0AQIQZFtesHRKsWbMGAMDmyKwBAM7l80kuC+vOrFkDABBhTIMDAAA7ILMGADiW4fPJsDANzke3AACINKbBAQCAHZBZAwCcy2dIrus/syZYAwCcyzAkWfnoljOCNdPgAADYHJk1AMCxDJ8hw8I0uOGQzJpgDQBwLsMna9PgzvjoFtPgAADHMnyG5RKK6upqDR8+XImJicrPz9fevXuvWn/r1q0aNWqUEhMTNWbMGL3zzjum+iNYAwBgwpYtW1RWVqaKigp99NFHysvLU1FRkU6cONFt/ffff1/z5s3TokWL9Je//EWzZ8/W7NmztX///qD7dBk2m7Bva2vTwIEDdbdmqo/6Rns4AACTvtJF7dY7OnPmjFJSUiLSR3t7u1JSUizHistjbWlpUXJysv+62+2W2+3utk1+fr7uvPNO/fKXv5Qk+Xw+5eTk6LHHHtOTTz55Rf25c+eqo6NDb731lv/aXXfdpXHjxmnDhg3BDdSwmZaWlsuPo6FQKBSKg0tLS0vEYsWXX35pZGZmhmWcAwYMuOJaRUVFt/12dnYa8fHxxrZt2wKuz58/3/jud7/bbZucnBzj5z//ecC1VatWGWPHjg3657XdBrPs7Gy1tLQoKSlJLpcr4Hvt7e3Kycm54l9AsYb7cAn34RLuwyXch0vscB8Mw9DZs2eVnZ0dsT4SExN1+PBhdXV1WX4twzCuiDc9ZdWnTp2S1+tVRkZGwPWMjAwdOHCg2zYej6fb+h6PJ+gx2i5Yx8XFaciQIVetk5ycHNO/jJdxHy7hPlzCfbiE+3BJtO9DpKa/vy4xMVGJiYkR78cO2GAGAECQ0tLSFB8fr9bW1oDrra2tyszM7LZNZmamqfrdIVgDABCkhIQEjR8/XrW1tf5rPp9PtbW1Kigo6LZNQUFBQH1J2rFjR4/1u2O7afCrcbvdqqio6HEtIVZwHy7hPlzCfbiE+3AJ9yHyysrKtGDBAk2YMEETJ05UVVWVOjo6VFxcLEmaP3++Bg8erMrKSknSkiVL9O1vf1vPP/+8vvOd7+i1117Thx9+qF/96ldB92m7j24BAGB3v/zlL/Xcc8/J4/Fo3LhxeuGFF5Sfny9JmjJlioYPH65Nmzb562/dulUrVqzQkSNHdMstt2jt2rWaOXNm0P0RrAEAsDnWrAEAsDmCNQAANkewBgDA5gjWAADYnGOCtdnjyK5Hq1evlsvlCiijRo2K9rAi7r333tOsWbOUnZ0tl8ul7du3B3zfMAytWrVKWVlZ6tevnwoLC3Xw4MHoDDaCrnUfFi5ceMX7Y/r06dEZbIRUVlbqzjvvVFJSktLT0zV79mw1NTUF1Llw4YJKSko0aNAgDRgwQHPmzLnigRROF8x9mDJlyhXvh0ceeSRKI4ZVjgjWZo8ju55985vf1PHjx/1l9+7d0R5SxHV0dCgvL0/V1dXdfn/t2rV64YUXtGHDBn3wwQe64YYbVFRUpAsXLvTySCPrWvdBkqZPnx7w/nj11Vd7cYSRt2vXLpWUlGjPnj3asWOHLl68qGnTpqmjo8NfZ9myZfrjH/+orVu3ateuXTp27Jjuu+++KI46/IK5D5K0ePHigPfD2rVrozRiWBb0kR9RNHHiRKOkpMT/tdfrNbKzs43Kysoojqr3VVRUGHl5edEeRlRJCjjtxufzGZmZmcZzzz3nv3bmzBnD7XYbr776ahRG2Dv++T4YhmEsWLDA+N73vheV8UTLiRMnDEnGrl27DMO49P++b9++xtatW/11Pv74Y0OSUV9fH61hRtw/3wfDMIxvf/vbxpIlS6I3KISV7TPrrq4uNTQ0qLCw0H8tLi5OhYWFqq+vj+LIouPgwYPKzs7WiBEj9OCDD+ro0aPRHlJUHT58WB6PJ+D9kZKSovz8/Jh8f9TV1Sk9PV233nqrHn30UZ0+fTraQ4qotrY2SVJqaqokqaGhQRcvXgx4P4waNUpDhw69rt8P/3wfLnvllVeUlpam0aNHq7y8XOfPn4/G8BAGtn/caCjHkV2v8vPztWnTJt166606fvy41qxZo3vuuUf79+9XUlJStIcXFZePmLN6/Nz1YPr06brvvvuUm5ur5uZm/ehHP9KMGTN
UX1+v+Pj4aA8v7Hw+n5YuXapJkyZp9OjRki69HxISEjRw4MCAutfz+6G7+yBJDzzwgIYNG6bs7Gzt27dPy5cvV1NTk954440ojhahsn2wxj/MmDHD/+exY8cqPz9fw4YN0+uvv65FixZFcWSwg/vvv9//5zFjxmjs2LEaOXKk6urqNHXq1CiOLDJKSkq0f//+mNi3cTU93YeHH37Y/+cxY8YoKytLU6dOVXNzs0aOHNnbw4RFtp8GD+U4slgxcOBAfeMb39ChQ4eiPZSoufwe4P1xpREjRigtLe26fH+Ulpbqrbfe0s6dOzVkyBD/9czMTHV1denMmTMB9a/X90NP96E7l59bfT2+H2KB7YN1KMeRxYpz586publZWVlZ0R5K1OTm5iozMzPg/dHe3q4PPvgg5t8fn332mU6fPn1dvT8Mw1Bpaam2bdumP//5z8rNzQ34/vjx49W3b9+A90NTU5OOHj16Xb0frnUfutPY2ChJ19X7IZY4Yhr8WseRxYrHH39cs2bN0rBhw3Ts2DFVVFQoPj5e8+bNi/bQIurcuXMB2cDhw4fV2Nio1NRUDR06VEuXLtUzzzyjW265Rbm5uVq5cqWys7M1e/bs6A06Aq52H1JTU7VmzRrNmTNHmZmZam5u1hNPPKGbb75ZRUVFURx1eJWUlGjz5s168803lZSU5F+HTklJUb9+/ZSSkqJFixaprKxMqampSk5O1mOPPaaCggLdddddUR59+FzrPjQ3N2vz5s2aOXOmBg0apH379mnZsmWaPHmyxo4dG+XRIyTR3o4erF/84hfG0KFDjYSEBGPixInGnj17oj2kXjd37lwjKyvLSEhIMAYPHmzMnTvXOHToULSHFXE7d+40JF1RFixYYBjGpY9vrVy50sjIyDDcbrcxdepUo6mpKbqDjoCr3Yfz588b06ZNM2666Sajb9++xrBhw4zFixcbHo8n2sMOq+5+fknGyy+/7K/z5ZdfGj/84Q+NG2+80ejfv7/x/e9/3zh+/Hj0Bh0B17oPR48eNSZPnmykpqYabrfbuPnmm43/+I//MNra2qI7cISMIzIBALA5269ZAwAQ6wjWAADYHMEaAACbI1gDAGBzBGsAAGyOYA0AgM0RrAEAsDmCNQAANkewBgDA5gjWAADYHMEaAACb+//McIdD2LipsAAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" }, { - "name": "stderr", + "name": "stdout", "output_type": "stream", "text": [ - "/tmp/ipykernel_30774/2167287842.py:7: UserWarning: Matplotlib is currently using agg, which is a non-GUI backend, so cannot show the figure.\n", - " plt.show()\n" + "Class: Sandal\n" ] } ], @@ -211,6 +222,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "dPR_WrTZglbh" @@ -222,7 +234,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 58, "metadata": { "colab": { "base_uri": "https://localhost:8080/" @@ -235,21 +247,21 @@ "name": "stdout", "output_type": "stream", "text": [ - "--2022-09-19 18:26:12-- https://github.com/mlc-ai/web-data/raw/main/models/fasionmnist_mlp_params.pkl\n", - "Resolving github.com (github.com)... 192.30.255.112\n", - "Connecting to github.com (github.com)|192.30.255.112|:443... connected.\n", + "--2023-05-10 23:27:23-- https://github.com/mlc-ai/web-data/raw/main/models/fasionmnist_mlp_params.pkl\n", + "Resolving github.com (github.com)... 140.82.113.4\n", + "Connecting to github.com (github.com)|140.82.113.4|:443... connected.\n", "HTTP request sent, awaiting response... 302 Found\n", "Location: https://raw.githubusercontent.com/mlc-ai/web-data/main/models/fasionmnist_mlp_params.pkl [following]\n", - "--2022-09-19 18:26:13-- https://raw.githubusercontent.com/mlc-ai/web-data/main/models/fasionmnist_mlp_params.pkl\n", - "Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 185.199.108.133, 185.199.111.133, 185.199.110.133, ...\n", - "Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|185.199.108.133|:443... connected.\n", + "--2023-05-10 23:27:24-- https://raw.githubusercontent.com/mlc-ai/web-data/main/models/fasionmnist_mlp_params.pkl\n", + "Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 2606:50c0:8000::154, 2606:50c0:8001::154, 2606:50c0:8002::154, ...\n", + "Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|2606:50c0:8000::154|:443... connected.\n", "HTTP request sent, awaiting response... 
200 OK\n", "Length: 407396 (398K) [application/octet-stream]\n", - "Saving to: ‘fasionmnist_mlp_params.pkl.1’\n", + "Saving to: ‘fasionmnist_mlp_params.pkl’\n", "\n", "fasionmnist_mlp_par 100%[===================>] 397.85K --.-KB/s in 0.05s \n", "\n", - "2022-09-19 18:26:13 (7.18 MB/s) - ‘fasionmnist_mlp_params.pkl.1’ saved [407396/407396]\n", + "2023-05-10 23:27:24 (7.70 MB/s) - ‘fasionmnist_mlp_params.pkl’ saved [407396/407396]\n", "\n" ] } @@ -259,6 +271,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "Rk5jwmmDzddJ" @@ -271,6 +284,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "hk64a3UllGIV" @@ -280,6 +294,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "PUcCRU2IQPm-" @@ -290,7 +305,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 60, "metadata": { "id": "vvfOgcu-YdaB" }, @@ -305,7 +320,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 61, "metadata": { "colab": { "base_uri": "https://localhost:8080/" @@ -318,8 +333,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "[[-10.968934 -13.400272 -7.7212744 -7.4016604 -7.5777307 8.872316\n", - " -6.1305714 -8.879843 -3.4321747 -2.1780372]]\n", + "[[ -8.505112 -19.33341 -5.5189652 -6.8927536 -14.0578785 11.494652\n", + " -11.22116 -9.992905 -2.6286726 -18.563715 ]]\n", "[5]\n", "Numpy-MLP Prediction: Sandal\n" ] @@ -340,6 +355,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "IgYd_scF1Vjw" @@ -433,6 +449,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "8OSO2aYs-hFD" @@ -445,7 +462,7 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 5, "metadata": { "id": "6wADqZ_TpGl7" }, @@ -454,8 +471,8 @@ "@tvm.script.ir_module\n", "class MyModule: \n", " @T.prim_func\n", - " def relu0(X: T.Buffer[(1, 128), \"float32\"], \n", - " Y: T.Buffer[(1, 128), \"float32\"]):\n", + " def relu0(X: T.Buffer((1, 128), \"float32\"), \n", + " Y: T.Buffer((1, 128), \"float32\")):\n", " # function attr dict\n", " T.func_attr({\"global_symbol\": \"relu0\", \"tir.noalias\": True})\n", " for i, j in T.grid(1, 128):\n", @@ -464,10 +481,10 @@ " Y[vi, vj] = T.max(X[vi, vj], T.float32(0))\n", "\n", " @T.prim_func\n", - " def linear0(X: T.Buffer[(1, 784), \"float32\"], \n", - " W: T.Buffer[(128, 784), \"float32\"], \n", - " B: T.Buffer[(128,), \"float32\"], \n", - " Z: T.Buffer[(1, 128), \"float32\"]):\n", + " def linear0(X: T.Buffer((1, 784), \"float32\"), \n", + " W: T.Buffer((128, 784), \"float32\"), \n", + " B: T.Buffer((128,), \"float32\"), \n", + " Z: T.Buffer((1, 128), \"float32\")):\n", " T.func_attr({\"global_symbol\": \"linear0\", \"tir.noalias\": True})\n", " Y = T.alloc_buffer((1, 128), \"float32\")\n", " for i, j, k in T.grid(1, 128, 784):\n", @@ -483,10 +500,10 @@ " Z[vi, vj] = Y[vi, vj] + B[vj]\n", "\n", " @T.prim_func\n", - " def linear1(X: T.Buffer[(1, 128), \"float32\"], \n", - " W: T.Buffer[(10, 128), \"float32\"], \n", - " B: T.Buffer[(10,), \"float32\"], \n", - " Z: T.Buffer[(1, 10), \"float32\"]):\n", + " def linear1(X: T.Buffer((1, 128), \"float32\"), \n", + " W: T.Buffer((10, 128), \"float32\"), \n", + " B: T.Buffer((10,), \"float32\"), \n", + " Z: T.Buffer((1, 10), \"float32\")):\n", " T.func_attr({\"global_symbol\": \"linear1\", \"tir.noalias\": True})\n", " Y = T.alloc_buffer((1, 10), \"float32\")\n", " for i, j, k in T.grid(1, 10, 128):\n", @@ -508,14 +525,15 @@ " w1: R.Tensor((10, 128), \"float32\"), \n", " b1: 
R.Tensor((10,), \"float32\")):\n", " with R.dataflow():\n", - " lv0 = R.call_tir(linear0, (x, w0, b0), (1, 128), dtype=\"float32\")\n", - " lv1 = R.call_tir(relu0, (lv0,), (1, 128), dtype=\"float32\")\n", - " out = R.call_tir(linear1, (lv1, w1, b1), (1, 10), dtype=\"float32\")\n", + " lv0 = R.call_dps_packed(\"linear0\", (x, w0, b0), out_sinfo=R.Tensor((1, 128), dtype=\"float32\"))\n", + " lv1 = R.call_dps_packed(\"relu0\", (lv0,), out_sinfo=R.Tensor((1, 128), dtype=\"float32\"))\n", + " out = R.call_dps_packed(\"linear1\", (lv1, w1, b1), out_sinfo=R.Tensor((1, 10), dtype=\"float32\"))\n", " R.output(out)\n", " return out" ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "d5-LtDpRoia2" @@ -524,10 +542,11 @@ "The above code contains kinds of functions: the primitive tensor functions (`T.prim_func`) that we saw in the last lecture and a new `R.function` (relax function). \n", "Relax function is a new type of abstraction representing high-level neural network executions. \n", "\n", - "Again it is helpful to see the TVMScript code and low-level numpy code side-by-side and check the corresponding elements, and we are going to walk through each of them in detail. Since we already learned about primitive tensor functions, we are going to focus on the high-level execution part." + "Again it is helpful to see the TVMScript code and low-level numpy code side-by-side and check the corresponding elements, and we are going to walk through each of them in detail. Since we already learned about primitive tensor functions, we are going to focus on the high-level execution part. Note that the `call_tir` API below has been changed to `call_dps_packed`." ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "Cq1FNpoNojNx" @@ -537,6 +556,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "hhfr5u-2msNV" @@ -546,6 +566,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "CQjGG5AxmdwJ" @@ -555,6 +576,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "nYweMW1krbih" @@ -569,28 +591,30 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "3WoknpyJYcph" }, "source": [ - "### call_tir construct\n", + "### call_dps_packed construct\n", "\n", - "One thing that you may have noticed is that each step of operations in the computational graph contains an `R.call_tir` operation. This is the operation that brings in the tensor primitive functions\n", + "One thing that you may have noticed is that each step of operations in the computational graph contains an `R.call_dps_packed` operation. 
This is the operation that brings in the tensor primitive functions\n", "\n", "\n", "```python\n", - "lv0 = R.call_tir(linear0, (x, w0, b0), (1, 128), dtype=\"float32\")\n", + "lv0 = R.call_dps_packed(\"linear0\", (x, w0, b0), out_sinfo=R.Tensor((1, 128), dtype=\"float32\"))\n", "```" ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "LQ-48agqtixI" }, "source": [ - "To explain what does `R.call_tir` mean, let us review an equivalent low-level numpy implementation of the operation, as follows:" + "To explain what does `R.call_dps_packed` mean, let us review an equivalent low-level numpy implementation of the operation, as follows:" ] }, { @@ -601,23 +625,24 @@ }, "outputs": [], "source": [ - "def lnumpy_call_tir(prim_func, inputs, shape, dtype):\n", + "def lnumpy_call_dps_packed(prim_func, inputs, shape, dtype):\n", " res = np.empty(shape, dtype=dtype)\n", " prim_func(*inputs, res)\n", " return res" ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "Juc_EGsIt7js" }, "source": [ - "Specifically, call_tir takes in a primitive function (`prim_func`) a list of inputs. Then what it does is allocate an output tensor `res`, then pass the inputs and the output to the `prim_func`. After executing `prim_func` the result is populated in `res`, then we can return the result.\n", + "Specifically, call_dps_packed takes in a primitive function (`prim_func`) a list of inputs. Then what it does is allocate an output tensor `res`, then pass the inputs and the output to the `prim_func`. After executing `prim_func` the result is populated in `res`, then we can return the result.\n", "\n", - "Note that `lnumpy_call_tir` is only a reference implementation to show the meaning of `R.call_tir`. In practice, there can be different low-level ways to optimize the execution. For example, we might choose to allocate all the output memories ahead of time and then run the execution, which we will cover in future lectures. \n", + "Note that `lnumpy_call_dps_packed` is only a reference implementation to show the meaning of `R.call_dps_packed`. In practice, there can be different low-level ways to optimize the execution. For example, we might choose to allocate all the output memories ahead of time and then run the execution, which we will cover in future lectures. \n", "\n", - "A natural question that one could ask is why do we need `call_tir` construct? This is because our primitive tensor functions take the following calling convention.\n", + "A natural question that one could ask is why do we need `call_dps_packed` construct? This is because our primitive tensor functions take the following calling convention.\n", "\n", "```python\n", "def low_level_prim_func(in0, in1, ..., out):\n", @@ -627,6 +652,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "msKx_bChuLfU" @@ -649,6 +675,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "UEjQ0O-20U7b" @@ -658,6 +685,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "-hpjGSxf0j7D" @@ -672,25 +700,26 @@ "\n", "Of course, we can still generalize the graph definition by introducing the input edge and output edge, and that can complicate the possible transformations associated with the abstraction. \n", "\n", - "So coming back to `call_tir`, the key insight here is that we want to hide possible allocation or explicit writing to the functions. 
In a more formal term, we want the function to be **pure** or **side-effect free**.\n", + "So coming back to `call_dps_packed`, the key insight here is that we want to hide possible allocation or explicit writing to the functions. In a more formal term, we want the function to be **pure** or **side-effect free**.\n", "\n", "A function is **pure** or **side-effect free** if: it only reads from its inputs and returns the result via its output, it will not change other parts of the program (such as incrementing a global counter).\n", "\n", - "**call_tir** is a way for us to hide these details of calling into low-level primitive functions and expose them into a computational graph." + "**call_dps_packed** is a way for us to hide these details of calling into low-level primitive functions and expose them into a computational graph." ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "qLVcODPTZj_v" }, "source": [ - "We can also see `call_tir` in action in the low-level numpy as well. Now we have defined the `lnumpy_call_tir`, we can rewrite the low-level numpy execution code as:" + "We can also see `call_dps_packed` in action in the low-level numpy as well. Now we have defined the `lnumpy_call_dps_packed`, we can rewrite the low-level numpy execution code as:" ] }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 4, "metadata": { "colab": { "base_uri": "https://localhost:8080/" @@ -698,23 +727,15 @@ "id": "nRj7MsLWYeO5", "outputId": "e4378e32-2706-491a-8f42-799199fac042" }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Low-level Numpy with CallTIR Prediction: Sandal\n" - ] - } - ], + "outputs": [], "source": [ - "def lnumpy_mlp_with_call_tir(data, w0, b0, w1, b1):\n", - " lv0 = lnumpy_call_tir(lnumpy_linear0, (data, w0, b0), (1, 128), dtype=\"float32\")\n", - " lv1 = lnumpy_call_tir(lnumpy_relu0, (lv0, ), (1, 128), dtype=\"float32\")\n", - " out = lnumpy_call_tir(lnumpy_linear1, (lv1, w1, b1), (1, 10), dtype=\"float32\")\n", + "def lnumpy_mlp_with_call_dps_packed(data, w0, b0, w1, b1):\n", + " lv0 = lnumpy_call_dps_packed(lnumpy_linear0, (data, w0, b0), (1, 128), dtype=\"float32\")\n", + " lv1 = lnumpy_call_dps_packed(lnumpy_relu0, (lv0, ), (1, 128), dtype=\"float32\")\n", + " out = lnumpy_call_dps_packed(lnumpy_linear1, (lv1, w1, b1), (1, 10), dtype=\"float32\")\n", " return out\n", "\n", - "result = lnumpy_mlp_with_call_tir(\n", + "result = lnumpy_mlp_with_call_dps_packed(\n", " img.reshape(1, 784), \n", " mlp_params[\"w0\"], \n", " mlp_params[\"b0\"], \n", @@ -722,19 +743,21 @@ " mlp_params[\"b1\"])\n", "\n", "pred_kind = np.argmax(result, axis=1)\n", - "print(\"Low-level Numpy with CallTIR Prediction:\", class_names[pred_kind[0]])" + "print(\"Low-level Numpy with Call DPS Packed Prediction:\", class_names[pred_kind[0]])" ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "Fx0vj13N3Fro" }, "source": [ - "In practice, the lowest-level implementation will have explicit memory allocations, so `call_tir` mainly serves as a purpose for us to continue to do some high-level transformations before we generate the actual implementation." + "In practice, the lowest-level implementation will have explicit memory allocations, so `call_dps_packed` mainly serves as a purpose for us to continue to do some high-level transformations before we generate the actual implementation." 
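To make the convention concrete, here is a minimal, self-contained numpy sketch of the destination-passing style that `call_dps_packed` wraps. The `lnumpy_relu` helper and the small test input are illustrative additions for this sketch only, not part of the lesson's code.

```python
import numpy as np

# A primitive function in destination-passing style: the caller
# allocates `out`, and the function writes its result into it.
def lnumpy_relu(x, out):
    np.maximum(x, 0, out=out)

# Reference semantics of call_dps_packed: allocate the destination,
# invoke the side-effecting primitive, then return the result so the
# call looks pure from the computational graph's point of view.
def lnumpy_call_dps_packed(prim_func, inputs, shape, dtype):
    res = np.empty(shape, dtype=dtype)
    prim_func(*inputs, res)
    return res

x = np.array([[-1.0, 2.0]], dtype="float32")
y = lnumpy_call_dps_packed(lnumpy_relu, (x,), (1, 2), dtype="float32")
print(y)  # [[0. 2.]]
```

Because the allocation happens in the wrapper rather than inside the primitive, a later pass is free to replace the fresh allocation with a pre-planned buffer without touching the primitive itself.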
] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "5QIpOq6O3dfw" @@ -746,9 +769,9 @@ "\n", "```python\n", "with R.dataflow():\n", - " lv0 = R.call_tir(linear0, (x, w0, b0), (1, 128), dtype=\"float32\")\n", - " lv1 = R.call_tir(relu0, (lv0,), (1, 128), dtype=\"float32\")\n", - " out = R.call_tir(linear1, (lv1, w1, b1), (1, 10), dtype=\"float32\")\n", + " lv0 = R.call_dps_packed(\"linear0\", (x, w0, b0), out_sinfo=R.Tensor((1, 128), dtype=\"float32\"))\n", + " lv1 = R.call_dps_packed(\"relu0\", (lv0,), out_sinfo=R.Tensor((1, 128), dtype=\"float32\"))\n", + " out = R.call_dps_packed(\"linear1\", (lv1, w1, b1), out_sinfo=R.Tensor((1, 10), dtype=\"float32\"))\n", " R.output(out)\n", "```\n", "\n", @@ -765,14 +788,14 @@ " b1: Tensor((10,), \"float32\")):\n", "\n", " with R.dataflow():\n", - " lv0 = R.call_tir(linear0, (x, w0, b0), (1, 128), dtype=\"float32\")\n", - " gv0 = R.call_tir(relu0, (lv0,), (1, 128), dtype=\"float32\")\n", + " lv0 = R.call_dps_packed(\"linear0\", (x, w0, b0), out_sinfo=R.Tensor((1, 128), dtype=\"float32\"))\n", + " gv0 = R.call_dps_packed(\"relu0\", (lv0,), out_sinfo=R.Tensor((1, 128), dtype=\"float32\"))\n", " R.output(gv0)\n", "\n", " gv1 = R.alloc_tensor((1, 128), dtype=\"float32\")\n", "\n", " with R.dataflow():\n", - " out = R.call_tir(linear1, (gv0, gv1, b0), (1, 128), dtype=\"float32\")\n", + " out = R.call_dps_packed(\"linear1\", (gv0, gv1, b0), out_sinfo=R.Tensor((1, 128), dtype=\"float32\"))\n", " R.output(out)\n", " return out\n", "```\n", @@ -781,6 +804,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "Y9aYKOoiC47j" @@ -791,13 +815,14 @@ "\n", "So far, we have gone through one example instance of relax program and covered most of the elements, including:\n", "- computational graph view\n", - "- `call_tir` construct\n", + "- `call_dps_packed` construct\n", "- Dataflow block.\n", "\n", "These elements should get us started in the end to end model execution and compilation. we will also cover new concepts as we encounter them in later chapters." ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "SXa8L7_OhGTX" @@ -811,7 +836,7 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": 6, "metadata": { "colab": { "base_uri": "https://localhost:8080/", @@ -824,28 +849,23 @@ { "data": { "text/html": [ - "
@tvm.script.ir_module\n",
-       "class Module:\n",
+       "
# from tvm.script import ir as I\n",
+       "# from tvm.script import tir as T\n",
+       "# from tvm.script import relax as R\n",
+       "\n",
+       "\n",
+       "@I.ir_module\n",
+       "class Module:\n",
        "    @T.prim_func\n",
-       "    def relu0(X: T.Buffer[(1, 128), "float32"], Y: T.Buffer[(1, 128), "float32"]) -> None:\n",
-       "        # function attr dict\n",
-       "        T.func_attr({"global_symbol": "relu0", "tir.noalias": True})\n",
-       "        # body\n",
-       "        # with T.block("root")\n",
-       "        for i, j in T.grid(1, 128):\n",
-       "            with T.block("Y"):\n",
-       "                vi, vj = T.axis.remap("SS", [i, j])\n",
-       "                T.reads(X[vi, vj])\n",
-       "                T.writes(Y[vi, vj])\n",
-       "                Y[vi, vj] = T.max(X[vi, vj], T.float32(0))\n",
-       "    \n",
-       "    @T.prim_func\n",
-       "    def linear0(X: T.Buffer[(1, 784), "float32"], W: T.Buffer[(128, 784), "float32"], B: T.Buffer[128, "float32"], Z: T.Buffer[(1, 128), "float32"]) -> None:\n",
-       "        # function attr dict\n",
-       "        T.func_attr({"global_symbol": "linear0", "tir.noalias": True})\n",
-       "        # body\n",
-       "        # with T.block("root")\n",
-       "        Y = T.alloc_buffer([1, 128], dtype="float32")\n",
+       "    def linear0(\n",
+       "        X: T.Buffer((1, 784), "float32"),\n",
+       "        W: T.Buffer((128, 784), "float32"),\n",
+       "        B: T.Buffer((128,), "float32"),\n",
+       "        Z: T.Buffer((1, 128), "float32"),\n",
+       "    ):\n",
+       "        T.func_attr({"global_symbol": "linear0", "tir.noalias": T.bool(True)})\n",
+       "        # with T.block("root"):\n",
+       "        Y = T.alloc_buffer((1, 128))\n",
        "        for i, j, k in T.grid(1, 128, 784):\n",
        "            with T.block("Y"):\n",
        "                vi, vj, vk = T.axis.remap("SSR", [i, j, k])\n",
@@ -860,24 +880,17 @@
        "                T.reads(Y[vi, vj], B[vj])\n",
        "                T.writes(Z[vi, vj])\n",
        "                Z[vi, vj] = Y[vi, vj] + B[vj]\n",
-       "    \n",
-       "    @R.function\n",
-       "    def main(x: Tensor((1, 784), "float32"), w0: Tensor((128, 784), "float32"), b0: Tensor((128,), "float32"), w1: Tensor((10, 128), "float32"), b1: Tensor((10,), "float32")) -> Tensor(None, "float32", ndim = 2):\n",
-       "        # block 0\n",
-       "        with R.dataflow():\n",
-       "            lv0 = R.call_tir(linear0, (x, w0, b0), (1, 128), dtype="float32")\n",
-       "            lv1 = R.call_tir(relu0, (lv0,), (1, 128), dtype="float32")\n",
-       "            out = R.call_tir(linear1, (lv1, w1, b1), (1, 10), dtype="float32")\n",
-       "            R.output(out)\n",
-       "        return out\n",
-       "    \n",
+       "\n",
        "    @T.prim_func\n",
-       "    def linear1(X: T.Buffer[(1, 128), "float32"], W: T.Buffer[(10, 128), "float32"], B: T.Buffer[10, "float32"], Z: T.Buffer[(1, 10), "float32"]) -> None:\n",
-       "        # function attr dict\n",
-       "        T.func_attr({"global_symbol": "linear1", "tir.noalias": True})\n",
-       "        # body\n",
-       "        # with T.block("root")\n",
-       "        Y = T.alloc_buffer([1, 10], dtype="float32")\n",
+       "    def linear1(\n",
+       "        X: T.Buffer((1, 128), "float32"),\n",
+       "        W: T.Buffer((10, 128), "float32"),\n",
+       "        B: T.Buffer((10,), "float32"),\n",
+       "        Z: T.Buffer((1, 10), "float32"),\n",
+       "    ):\n",
+       "        T.func_attr({"global_symbol": "linear1", "tir.noalias": T.bool(True)})\n",
+       "        # with T.block("root"):\n",
+       "        Y = T.alloc_buffer((1, 10))\n",
        "        for i, j, k in T.grid(1, 10, 128):\n",
        "            with T.block("Y"):\n",
        "                vi, vj, vk = T.axis.remap("SSR", [i, j, k])\n",
@@ -892,7 +905,38 @@
        "                T.reads(Y[vi, vj], B[vj])\n",
        "                T.writes(Z[vi, vj])\n",
        "                Z[vi, vj] = Y[vi, vj] + B[vj]\n",
-       "    \n",
+       "\n",
+       "    @T.prim_func\n",
+       "    def relu0(X: T.Buffer((1, 128), "float32"), Y: T.Buffer((1, 128), "float32")):\n",
+       "        T.func_attr({"global_symbol": "relu0", "tir.noalias": T.bool(True)})\n",
+       "        # with T.block("root"):\n",
+       "        for i, j in T.grid(1, 128):\n",
+       "            with T.block("Y"):\n",
+       "                vi, vj = T.axis.remap("SS", [i, j])\n",
+       "                T.reads(X[vi, vj])\n",
+       "                T.writes(Y[vi, vj])\n",
+       "                Y[vi, vj] = T.max(X[vi, vj], T.float32(0))\n",
+       "\n",
+       "    @R.function\n",
+       "    def main(\n",
+       "        x: R.Tensor((1, 784), dtype="float32"),\n",
+       "        w0: R.Tensor((128, 784), dtype="float32"),\n",
+       "        b0: R.Tensor((128,), dtype="float32"),\n",
+       "        w1: R.Tensor((10, 128), dtype="float32"),\n",
+       "        b1: R.Tensor((10,), dtype="float32"),\n",
+       "    ) -> R.Tensor((1, 10), dtype="float32"):\n",
+       "        with R.dataflow():\n",
+       "            lv0 = R.call_dps_packed(\n",
+       "                "linear0", (x, w0, b0), out_sinfo=R.Tensor((1, 128), dtype="float32")\n",
+       "            )\n",
+       "            lv1 = R.call_dps_packed(\n",
+       "                "relu0", (lv0,), out_sinfo=R.Tensor((1, 128), dtype="float32")\n",
+       "            )\n",
+       "            out = R.call_dps_packed(\n",
+       "                "linear1", (lv1, w1, b1), out_sinfo=R.Tensor((1, 10), dtype="float32")\n",
+       "            )\n",
+       "            R.output(out)\n",
+       "        return out\n",
        "
\n" ], "text/plain": [ @@ -908,6 +952,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "Ai-pjfobEpoi" @@ -944,6 +989,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "uSyXxd3fE7rg" @@ -964,6 +1010,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "lpIMjrVdFR0d" @@ -974,7 +1021,7 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": 62, "metadata": { "id": "9mOk7BkxFRC9" }, @@ -985,6 +1032,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "ksx1-hl1FrtA" @@ -1028,6 +1076,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "0Cv6FEY1F2fx" @@ -1061,6 +1110,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "g2pBn9vlTz3I" @@ -1091,29 +1141,31 @@ " b1: R.Tensor((10,), \"float32\")):\n", " # block 0\n", " with R.dataflow():\n", - " lv0 = R.call_tir(\"env.linear\", (x, w0, b0), (1, 128), dtype=\"float32\")\n", - " lv1 = R.call_tir(\"env.relu\", (lv0,), (1, 128), dtype=\"float32\")\n", - " out = R.call_tir(\"env.linear\", (lv1, w1, b1), (1, 10), dtype=\"float32\")\n", + " lv0 = R.call_dps_packed(\"env.linear\", (x, w0, b0), out_sinfo=R.Tensor((1, 128), dtype=\"float32\"))\n", + " lv1 = R.call_dps_packed(\"env.relu\", (lv0,), out_sinfo=R.Tensor((1, 128), dtype=\"float32\"))\n", + " out = R.call_dps_packed(\"env.linear\", (lv1, w1, b1), out_sinfo=R.Tensor((1, 10), dtype=\"float32\"))\n", " R.output(out)\n", " return out" ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "6u42cRGpGk7Y" }, "source": [ - "Note that we now directly pass in strings in `call_tir`\n", + "Note that we now directly pass in strings in `call_dps_packed`\n", "\n", "```python\n", - "R.call_tir(\"env.linear\", (x, w0, b0), (1, 128), dtype=\"float32\")\n", + "R.call_dps_packed(\"env.linear\", (x, w0, b0), out_sinfo=R.Tensor((1, 128), dtype=\"float32\"))\n", "```\n", "\n", "These strings are names of runtime functions that we expect to exist during model execution. 
" ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "6uBJoYjHHrlM" @@ -1153,6 +1205,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "z-UeSjTeH9QN" @@ -1166,6 +1219,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "LKkqyqATJboW" @@ -1210,6 +1264,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "pi3g5dNdWHFr" @@ -1222,7 +1277,7 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": 64, "metadata": { "id": "poavVsmOWPDW" }, @@ -1231,10 +1286,10 @@ "@tvm.script.ir_module\n", "class MyModuleMixture: \n", " @T.prim_func\n", - " def linear0(X: T.Buffer[(1, 784), \"float32\"], \n", - " W: T.Buffer[(128, 784), \"float32\"], \n", - " B: T.Buffer[(128,), \"float32\"], \n", - " Z: T.Buffer[(1, 128), \"float32\"]):\n", + " def linear0(X: T.Buffer((1, 784), \"float32\"), \n", + " W: T.Buffer((128, 784), \"float32\"), \n", + " B: T.Buffer((128,), \"float32\"), \n", + " Z: T.Buffer((1, 128), \"float32\")):\n", " T.func_attr({\"global_symbol\": \"linear0\", \"tir.noalias\": True})\n", " Y = T.alloc_buffer((1, 128), \"float32\")\n", " for i, j, k in T.grid(1, 128, 784):\n", @@ -1256,14 +1311,15 @@ " w1: R.Tensor((10, 128), \"float32\"), \n", " b1: R.Tensor((10,), \"float32\")):\n", " with R.dataflow():\n", - " lv0 = R.call_tir(linear0, (x, w0, b0), (1, 128), dtype=\"float32\")\n", - " lv1 = R.call_tir(\"env.relu\", (lv0,), (1, 128), dtype=\"float32\")\n", - " out = R.call_tir(\"env.linear\", (lv1, w1, b1), (1, 10), dtype=\"float32\")\n", + " lv0 = R.call_dps_packed(\"linear0\", (x, w0, b0), out_sinfo=R.Tensor((1, 128), dtype=\"float32\"))\n", + " lv1 = R.call_dps_packed(\"env.relu\", (lv0,), out_sinfo=R.Tensor((1, 128), dtype=\"float32\"))\n", + " out = R.call_dps_packed(\"env.linear\", (lv1, w1, b1), out_sinfo=R.Tensor((1, 10), dtype=\"float32\"))\n", " R.output(out)\n", " return out" ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "Sy5QigNPKIp0" @@ -1306,6 +1362,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "wDDFMW1-Ksi3" @@ -1318,7 +1375,7 @@ }, { "cell_type": "code", - "execution_count": 23, + "execution_count": 63, "metadata": { "colab": { "base_uri": "https://localhost:8080/", @@ -1331,25 +1388,23 @@ { "data": { "text/html": [ - "
@tvm.script.ir_module\n",
-       "class Module:\n",
-       "    @R.function\n",
-       "    def main(x: Tensor((1, 784), "float32")) -> Tensor(None, "float32", ndim = 2):\n",
-       "        # block 0\n",
-       "        with R.dataflow():\n",
-       "            lv0 = R.call_tir(linear0, (x, meta[relay.Constant][0], meta[relay.Constant][1]), (1, 128), dtype="float32")\n",
-       "            lv1 = R.call_tir("env.relu", (lv0,), (1, 128), dtype="float32")\n",
-       "            out = R.call_tir("env.linear", (lv1, meta[relay.Constant][2], meta[relay.Constant][3]), (1, 10), dtype="float32")\n",
-       "            R.output(out)\n",
-       "        return out\n",
-       "    \n",
+       "
# from tvm.script import ir as I\n",
+       "# from tvm.script import tir as T\n",
+       "# from tvm.script import relax as R\n",
+       "\n",
+       "\n",
+       "@I.ir_module\n",
+       "class Module:\n",
        "    @T.prim_func\n",
-       "    def linear0(X: T.Buffer[(1, 784), "float32"], W: T.Buffer[(128, 784), "float32"], B: T.Buffer[128, "float32"], Z: T.Buffer[(1, 128), "float32"]) -> None:\n",
-       "        # function attr dict\n",
-       "        T.func_attr({"global_symbol": "linear0", "tir.noalias": True})\n",
-       "        # body\n",
-       "        # with T.block("root")\n",
-       "        Y = T.alloc_buffer([1, 128], dtype="float32")\n",
+       "    def linear0(\n",
+       "        X: T.Buffer((1, 784), "float32"),\n",
+       "        W: T.Buffer((128, 784), "float32"),\n",
+       "        B: T.Buffer((128,), "float32"),\n",
+       "        Z: T.Buffer((1, 128), "float32"),\n",
+       "    ):\n",
+       "        T.func_attr({"global_symbol": "linear0", "tir.noalias": T.bool(True)})\n",
+       "        # with T.block("root"):\n",
+       "        Y = T.alloc_buffer((1, 128))\n",
        "        for i, j, k in T.grid(1, 128, 784):\n",
        "            with T.block("Y"):\n",
        "                vi, vj, vk = T.axis.remap("SSR", [i, j, k])\n",
@@ -1364,7 +1419,38 @@
        "                T.reads(Y[vi, vj], B[vj])\n",
        "                T.writes(Z[vi, vj])\n",
        "                Z[vi, vj] = Y[vi, vj] + B[vj]\n",
-       "    \n",
+       "\n",
+       "    @R.function\n",
+       "    def main(\n",
+       "        x: R.Tensor((1, 784), dtype="float32")\n",
+       "    ) -> R.Tensor((1, 10), dtype="float32"):\n",
+       "        with R.dataflow():\n",
+       "            lv0 = R.call_dps_packed(\n",
+       "                "linear0",\n",
+       "                (\n",
+       "                    x,\n",
+       "                    metadata["relax.expr.Constant"][0],\n",
+       "                    metadata["relax.expr.Constant"][1],\n",
+       "                ),\n",
+       "                out_sinfo=R.Tensor((1, 128), dtype="float32"),\n",
+       "            )\n",
+       "            lv1 = R.call_dps_packed(\n",
+       "                "env.relu", (lv0,), out_sinfo=R.Tensor((1, 128), dtype="float32")\n",
+       "            )\n",
+       "            out = R.call_dps_packed(\n",
+       "                "env.linear",\n",
+       "                (\n",
+       "                    lv1,\n",
+       "                    metadata["relax.expr.Constant"][2],\n",
+       "                    metadata["relax.expr.Constant"][3],\n",
+       "                ),\n",
+       "                out_sinfo=R.Tensor((1, 10), dtype="float32"),\n",
+       "            )\n",
+       "            R.output(out)\n",
+       "        return out\n",
+       "\n",
+       "\n",
+       "# Metadata omitted. Use show_meta=True in script() method to show it.\n",
        "
\n" ], "text/plain": [ @@ -1381,6 +1467,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "j8knyt63L8c0" @@ -1419,6 +1506,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "Y7UCWyFusX5X" @@ -1436,6 +1524,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "y2KrBILMsNGf" @@ -1445,6 +1534,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "VkB5oHttOY0U" @@ -1458,6 +1548,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "pZGWq5EjJ0BA" @@ -1466,7 +1557,7 @@ "## Summary\n", "- Computational graph abstraction helps to stitch primitive tensor functions together for end-to-end execution.\n", "- Key elements of relax abstraction include\n", - " - call_tir construct that embeds destination passing style primitive function into the computational graph\n", + " - call_dps_packed construct that embeds destination passing style primitive function into the computational graph\n", " - dataflow block\n", "- Computational graph allows call into both environment library functions and TensorIR functions." ] @@ -1494,7 +1585,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.1" + "version": "3.9.16" }, "vscode": { "interpreter": { From 9298571bf8f7e6dfb677eadb3c99edd1a51a2d03 Mon Sep 17 00:00:00 2001 From: Sudeep Agarwal Date: Thu, 11 May 2023 09:25:58 -0400 Subject: [PATCH 2/3] Update relax build usage --- 4_Build_End_to_End_Model.ipynb | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/4_Build_End_to_End_Model.ipynb b/4_Build_End_to_End_Model.ipynb index aefd3ac..b822078 100644 --- a/4_Build_End_to_End_Model.ipynb +++ b/4_Build_End_to_End_Model.ipynb @@ -958,7 +958,7 @@ "id": "Ai-pjfobEpoi" }, "source": [ - "We call `relax.vm.build` to build this function. Relax is still under development, so some of the APIs may change. Our main goal, though, is to get familiar with the overall MLC flow (Construct, transform, build) for end-to-end models.\n" + "We call `relax.build` to build this function. Relax is still under development, so some of the APIs may change. 
Our main goal, though, is to get familiar with the overall MLC flow (Construct, transform, build) for end-to-end models.\n" ] }, { @@ -984,7 +984,7 @@ } ], "source": [ - "ex = relax.vm.build(MyModule, target=\"llvm\")\n", + "ex = relax.build(MyModule, target=\"llvm\")\n", "type(ex)" ] }, From 1c91ccf6afc51ef3033f971e16208ecbdbbc5b21 Mon Sep 17 00:00:00 2001 From: Sudeep Agarwal Date: Thu, 11 May 2023 10:14:07 -0400 Subject: [PATCH 3/3] Additional updates to relax build usage --- 4_Build_End_to_End_Model.ipynb | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/4_Build_End_to_End_Model.ipynb b/4_Build_End_to_End_Model.ipynb index b822078..ad2a0c0 100644 --- a/4_Build_End_to_End_Model.ipynb +++ b/4_Build_End_to_End_Model.ipynb @@ -1232,7 +1232,7 @@ }, { "cell_type": "code", - "execution_count": 20, + "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" @@ -1250,7 +1250,7 @@ } ], "source": [ - "ex = relax.vm.build(MyModuleWithExternCall, target=\"llvm\")\n", + "ex = relax.build(MyModuleWithExternCall, target=\"llvm\")\n", "vm = relax.VirtualMachine(ex, tvm.cpu())\n", "\n", "nd_res = vm[\"main\"](data_nd, \n", @@ -1348,7 +1348,7 @@ } ], "source": [ - "ex = relax.vm.build(MyModuleMixture, target=\"llvm\")\n", + "ex = relax.build(MyModuleMixture, target=\"llvm\")\n", "vm = relax.VirtualMachine(ex, tvm.cpu())\n", "\n", "nd_res = vm[\"main\"](data_nd, \n", @@ -1496,7 +1496,7 @@ } ], "source": [ - "ex = relax.vm.build(MyModuleWithParams, target=\"llvm\")\n", + "ex = relax.build(MyModuleWithParams, target=\"llvm\")\n", "vm = relax.VirtualMachine(ex, tvm.cpu())\n", "\n", "nd_res = vm[\"main\"](data_nd)\n",