{
"nbformat": 4,
"nbformat_minor": 0,
"metadata": {
"colab": {
"provenance": []
},
"kernelspec": {
"name": "python3",
"display_name": "Python 3"
},
"language_info": {
"name": "python"
}
},
"cells": [
{
"cell_type": "markdown",
"source": [
"# Install Dependencies"
],
"metadata": {
"id": "39AMoCOa1ckc"
}
},
{
"metadata": {
"id": "VoHxuLPu7s37"
},
"cell_type": "code",
"source": [
"# Download protoc v3.19.0 (protobuf compiler) and install it under /usr/local.\n",
"# -q keeps wget quiet; -o lets unzip overwrite any existing files.\n",
"! wget -q https://github.com/protocolbuffers/protobuf/releases/download/v3.19.0/protoc-3.19.0-linux-x86_64.zip\n",
"! unzip -o protoc-3.19.0-linux-x86_64.zip -d /usr/local/"
],
"outputs": [],
"execution_count": null
},
{
"cell_type": "markdown",
"source": [
"## Install LiteRT Pipeline"
],
"metadata": {
"id": "qGAaAKzYK5ei"
}
},
{
"cell_type": "code",
"source": [
"# Use %pip (not !pip) so the package is installed into the environment of the\n",
"# running kernel rather than whichever pip happens to be first on PATH.\n",
"%pip install git+https://github.com/google-ai-edge/ai-edge-apis.git#subdirectory=litert_tools"
],
"metadata": {
"id": "43tAeO0AZ7zp"
},
"execution_count": null,
"outputs": []
},
{
"cell_type": "markdown",
"source": [
"# Create Pipeline from model file"
],
"metadata": {
"id": "K5okZCTgYpUd"
}
},
{
"cell_type": "code",
"source": [
"from litert_tools.pipeline import pipeline\n",
"\n",
"# Load the Qwen2.5-1.5B-Instruct LiteRT task bundle (int8, seq len 128,\n",
"# 1280-token KV cache) and build a pipeline runner for inference.\n",
"# NOTE(review): presumably this downloads the .task file from the\n",
"# litert-community Hugging Face repo on first use — confirm before offline runs.\n",
"runner = pipeline.load(\"litert-community/Qwen2.5-1.5B-Instruct\", \"Qwen2.5-1.5B-Instruct_seq128_q8_ekv1280.task\")"
],
"metadata": {
"id": "3t47HAG2tvc3"
},
"execution_count": null,
"outputs": []
},
{
"cell_type": "markdown",
"source": [
"# Generate text from model"
],
"metadata": {
"id": "dASKx_JtYXwe"
}
},
{
"cell_type": "code",
"source": [
"# Disclaimer: Model performance demonstrated with the Python API in this notebook is not representative of performance on a local device.\n",
"prompt = \"What is the capital of France?\"\n",
"output = runner.generate(prompt, max_decode_steps=None)\n",
"# Display the generated text: without this bare trailing expression the result\n",
"# is assigned but never shown under Restart & Run All.\n",
"output"
],
"metadata": {
"id": "wT9BIiATkjzL"
},
"execution_count": null,
"outputs": []
}
]
}