-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
7 changed files
with
297 additions
and
304 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,155 @@ | ||
{ | ||
"cells": [ | ||
{ | ||
"cell_type": "code", | ||
"execution_count": 4, | ||
"metadata": { | ||
"cellView": "form", | ||
"id": "-cp253OYk0zk" | ||
}, | ||
"outputs": [], | ||
"source": [ | ||
"%%capture\n", | ||
"!python -m pip install abraia\n", | ||
"!python -m pip install onnx onnxruntime\n", | ||
"\n", | ||
"import os\n", | ||
"if not os.getenv('ABRAIA_ID') and not os.getenv('ABRAIA_KEY'):\n", | ||
" #@markdown <a href=\"https://abraia.me/console/gallery\" target=\"_blank\">Upload and manage your images</a>\n", | ||
" abraia_id = '' #@param {type: \"string\"}\n", | ||
" abraia_key = '' #@param {type: \"string\"}\n", | ||
" %env ABRAIA_ID=$abraia_id\n", | ||
" %env ABRAIA_KEY=$abraia_key\n", | ||
"\n", | ||
"from abraia import Abraia\n", | ||
"\n", | ||
"multiple = Abraia()" | ||
] | ||
}, | ||
{ | ||
"cell_type": "code", | ||
"execution_count": null, | ||
"metadata": {}, | ||
"outputs": [], | ||
"source": [ | ||
import os

import torch
import torchvision

# A fixed-size dummy input traces the graph for ONNX export (static 1x3x224x224 batch).
dummy_input = torch.randn(1, 3, 224, 224)
model = torchvision.models.alexnet(pretrained=True)
model.eval()  # eval mode so dropout is disabled in the exported graph

input_names = ["input1"]
output_names = ["output1"]

# torch.onnx.export does not create missing directories — ensure the target exists.
os.makedirs("assets", exist_ok=True)
torch.onnx.export(
    model,
    dummy_input,
    "assets/model.onnx",
    verbose=True,
    input_names=input_names,
    output_names=output_names,
)
] | ||
}, | ||
{ | ||
"cell_type": "code", | ||
"execution_count": 5, | ||
"metadata": { | ||
"id": "EmW59CFnnBHS" | ||
}, | ||
"outputs": [], | ||
"source": [ | ||
import torch
import onnx  # installed by the setup cell
from abraia.torch import load_classes, load_model, read_image, transform

dataset = 'hymenoptera_data'

class_names = load_classes(os.path.join(dataset, 'model_ft.txt'))
model = load_model(os.path.join(dataset, 'model_ft.pt'), class_names)
# Export must run in eval mode so dropout/batch-norm behave deterministically.
# (Idempotent even if load_model already switched the model to eval — TODO confirm.)
model.eval()

img = read_image(os.path.join(dataset, 'dog.jpg'))
input_batch = transform(img).unsqueeze(0)
# TODO: Define export_onnx
torch.onnx.export(model, input_batch, 'model_ft.onnx', export_params=True,
                  opset_version=10, do_constant_folding=True,
                  input_names=['input'], output_names=['output'])
# Validate the exported graph instead of leaving the check commented out.
onnx_model = onnx.load('model_ft.onnx')
onnx.checker.check_model(onnx_model)
] | ||
}, | ||
{ | ||
"cell_type": "code", | ||
"execution_count": 6, | ||
"metadata": { | ||
"colab": { | ||
"base_uri": "https://localhost:8080/" | ||
}, | ||
"id": "YkUzYb9dmcXO", | ||
"outputId": "a99d48c9-55c2-42a4-aafc-750ce117719a" | ||
}, | ||
"outputs": [ | ||
{ | ||
"data": { | ||
"text/plain": [ | ||
"{'dogs': 3.2437400817871094,\n", | ||
" 'bees': -1.3845250606536865,\n", | ||
" 'ants': -1.5448365211486816,\n", | ||
" 'cats': -2.4362499713897705}" | ||
] | ||
}, | ||
"execution_count": 6, | ||
"metadata": {}, | ||
"output_type": "execute_result" | ||
} | ||
], | ||
"source": [ | ||
"import time\n", | ||
"import numpy as np\n", | ||
"import onnxruntime as ort\n", | ||
"from PIL import Image\n", | ||
"\n", | ||
def preprocess(img):
    """Convert a loaded PIL image into a normalized NCHW batch.

    Resizes to 256x256, center-crops to 224x224, normalizes with the
    ImageNet mean/std, and returns a (1, 3, 224, 224) array.
    """
    # Guard against grayscale / RGBA inputs: the normalization below
    # broadcasts against a 3-channel mean/std and would otherwise fail.
    img = img.convert('RGB')
    img = np.array(img.resize((256, 256))).astype(np.float32)
    # center crop 256 -> 224
    rm_pad = (256 - 224) // 2
    img = img[rm_pad:-rm_pad, rm_pad:-rm_pad]
    # normalize by ImageNet mean + std (float64 mean/std promotes the result;
    # the caller casts back to float32 before inference)
    img = (img / 255. - np.array([0.485, 0.456, 0.406])) / np.array([0.229, 0.224, 0.225])
    img = img.transpose((2, 0, 1))  # HWC -> CHW
    img = np.expand_dims(img, axis=0)  # add batch dimension
    return img
"\n", | ||
def predict(path, top=5):
    """Classify the image at `path` and return the `top` highest-scoring labels.

    Relies on the module-level `ort_session` (ONNX Runtime session) and
    `labels` (class-name list). Returns a dict mapping label -> raw logit,
    ordered from highest to lowest score. `top` is clipped implicitly by the
    number of model outputs (the demo model has only 4 classes).
    """
    img = Image.open(path)
    img_batch = preprocess(img)
    # ONNX Runtime requires float32 input; preprocess may yield float64.
    outputs = ort_session.run(None, {"input": img_batch.astype(np.float32)})
    scores = outputs[0].flatten()
    ranked = np.argsort(-scores)  # indices from highest to lowest logit
    return {labels[i]: float(scores[i]) for i in ranked[:top]}
"\n", | ||
ort_session = ort.InferenceSession("model_ft.onnx", providers=['CPUExecutionProvider'])

labels = class_names
# Use the same dataset-relative path as the export cell above instead of a
# hardcoded absolute /tmp path, so the notebook runs from any working directory.
image_path = os.path.join(dataset, 'dog.jpg')
predict(image_path)
] | ||
} | ||
], | ||
"metadata": { | ||
"colab": { | ||
"provenance": [] | ||
}, | ||
"kernelspec": { | ||
"display_name": "Python 3", | ||
"name": "python3" | ||
}, | ||
"language_info": { | ||
"name": "python" | ||
} | ||
}, | ||
"nbformat": 4, | ||
"nbformat_minor": 0 | ||
} |
Oops, something went wrong.