Hijiki-HF committed
Commit 1a2153f · 1 Parent(s): 2cfe77b
Files changed (5)
  1. .gitignore +2 -0
  2. Dockerfile +10 -0
  3. README.md +7 -0
  4. practice/practice.ipynb +883 -0
  5. practice/practice.py +1 -0
.gitignore CHANGED
@@ -1,3 +1,5 @@
+ models/
+
  # Byte-compiled / optimized / DLL files
  __pycache__/
  *.py[cod]
Dockerfile ADDED
@@ -0,0 +1,10 @@
+ FROM python:3.11
+
+ RUN apt update
+ RUN apt install -y gcc cmake wget
+
+ RUN pip install --upgrade pip
+
+ WORKDIR /work
+
+ CMD ["/bin/bash"]
README.md CHANGED
@@ -12,3 +12,10 @@ Practice for projects that use natural language processing techniques such as LLMs and BERT
  4. Make it possible to input text longer than a synopsis.
  5. Publish it with gradio or similar.
  6. Deploy to Google Cloud.
+
+ ## Environment
+ - M1 MacBook Air (2020)
+ - Python 3.11.9
+ - llama-cpp-python
+   - Installed not only the command line tools but also the Xcode app itself
+   - Installed cmake with brew
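
The notebook added below loads Llama-3-ELYZA-JP-8B-q4_k_m.gguf through llama-cpp-python, offloading all 33 layers to the M1's Metal GPU. The cell source is not visible in this part of the diff, so the following is only a minimal sketch of a call that would produce the parameters seen in the stderr log; the actual code in practice.ipynb may differ.

    # Sketch only: values reconstructed from the notebook's stderr log below,
    # not the committed code.
    from llama_cpp import Llama

    llm = Llama(
        model_path="../models/Llama-3-ELYZA-JP-8B-q4_k_m.gguf",  # GGUF V3, Q4_K - Medium
        n_ctx=512,        # log: llama_new_context_with_model: n_ctx = 512
        n_batch=128,      # log: llama_new_context_with_model: n_batch = 128
        n_gpu_layers=-1,  # log: llm_load_tensors: offloaded 33/33 layers to GPU
    )

Note that the log warns the 512-token context is well below the model's 8192-token training context, and that the allocated Metal buffers exceed the recommended max working set size on this machine.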
practice/practice.ipynb ADDED
@@ -0,0 +1,883 @@
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 4,
6
+ "metadata": {},
7
+ "outputs": [
8
+ {
9
+ "name": "stderr",
10
+ "output_type": "stream",
11
+ "text": [
12
+ "llama_load_model_from_file: using device Metal (Apple M1) - 17592186044388 MiB free\n",
13
+ "llama_model_loader: loaded meta data with 22 key-value pairs and 291 tensors from ../models/Llama-3-ELYZA-JP-8B-q4_k_m.gguf (version GGUF V3 (latest))\n",
14
+ "llama_model_loader: Dumping metadata keys/values. Note: KV overrides do not apply in this output.\n",
15
+ "llama_model_loader: - kv 0: general.architecture str = llama\n",
16
+ "llama_model_loader: - kv 1: general.name str = Llama-3-8B-optimal-merged-stage2\n",
17
+ "llama_model_loader: - kv 2: llama.block_count u32 = 32\n",
18
+ "llama_model_loader: - kv 3: llama.context_length u32 = 8192\n",
19
+ "llama_model_loader: - kv 4: llama.embedding_length u32 = 4096\n",
20
+ "llama_model_loader: - kv 5: llama.feed_forward_length u32 = 14336\n",
21
+ "llama_model_loader: - kv 6: llama.attention.head_count u32 = 32\n",
22
+ "llama_model_loader: - kv 7: llama.attention.head_count_kv u32 = 8\n",
23
+ "llama_model_loader: - kv 8: llama.rope.freq_base f32 = 500000.000000\n",
24
+ "llama_model_loader: - kv 9: llama.attention.layer_norm_rms_epsilon f32 = 0.000010\n",
25
+ "llama_model_loader: - kv 10: general.file_type u32 = 15\n",
26
+ "llama_model_loader: - kv 11: llama.vocab_size u32 = 128256\n",
27
+ "llama_model_loader: - kv 12: llama.rope.dimension_count u32 = 128\n",
28
+ "llama_model_loader: - kv 13: tokenizer.ggml.model str = gpt2\n",
29
+ "llama_model_loader: - kv 14: tokenizer.ggml.pre str = llama-bpe\n",
30
+ "llama_model_loader: - kv 15: tokenizer.ggml.tokens arr[str,128256] = [\"!\", \"\\\"\", \"#\", \"$\", \"%\", \"&\", \"'\", ...\n",
31
+ "llama_model_loader: - kv 16: tokenizer.ggml.token_type arr[i32,128256] = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, ...\n",
32
+ "llama_model_loader: - kv 17: tokenizer.ggml.merges arr[str,280147] = [\"Ġ Ġ\", \"Ġ ĠĠĠ\", \"ĠĠ ĠĠ\", \"...\n",
33
+ "llama_model_loader: - kv 18: tokenizer.ggml.bos_token_id u32 = 128000\n",
34
+ "llama_model_loader: - kv 19: tokenizer.ggml.eos_token_id u32 = 128009\n",
35
+ "llama_model_loader: - kv 20: tokenizer.chat_template str = {% set loop_messages = messages %}{% ...\n",
36
+ "llama_model_loader: - kv 21: general.quantization_version u32 = 2\n",
37
+ "llama_model_loader: - type f32: 65 tensors\n",
38
+ "llama_model_loader: - type q4_K: 193 tensors\n",
39
+ "llama_model_loader: - type q6_K: 33 tensors\n",
40
+ "llm_load_vocab: control token: 128255 '<|reserved_special_token_250|>' is not marked as EOG\n",
41
+ "llm_load_vocab: control token: 128253 '<|reserved_special_token_248|>' is not marked as EOG\n",
42
+ "llm_load_vocab: control token: 128251 '<|reserved_special_token_246|>' is not marked as EOG\n",
43
+ "llm_load_vocab: control token: 128249 '<|reserved_special_token_244|>' is not marked as EOG\n",
44
+ "llm_load_vocab: control token: 128248 '<|reserved_special_token_243|>' is not marked as EOG\n",
45
+ "llm_load_vocab: control token: 128247 '<|reserved_special_token_242|>' is not marked as EOG\n",
46
+ "llm_load_vocab: control token: 128245 '<|reserved_special_token_240|>' is not marked as EOG\n",
47
+ "llm_load_vocab: control token: 128244 '<|reserved_special_token_239|>' is not marked as EOG\n",
48
+ "llm_load_vocab: control token: 128242 '<|reserved_special_token_237|>' is not marked as EOG\n",
49
+ "llm_load_vocab: control token: 128241 '<|reserved_special_token_236|>' is not marked as EOG\n",
50
+ "llm_load_vocab: control token: 128240 '<|reserved_special_token_235|>' is not marked as EOG\n",
51
+ "llm_load_vocab: control token: 128237 '<|reserved_special_token_232|>' is not marked as EOG\n",
52
+ "llm_load_vocab: control token: 128235 '<|reserved_special_token_230|>' is not marked as EOG\n",
53
+ "llm_load_vocab: control token: 128232 '<|reserved_special_token_227|>' is not marked as EOG\n",
54
+ "llm_load_vocab: control token: 128231 '<|reserved_special_token_226|>' is not marked as EOG\n",
55
+ "llm_load_vocab: control token: 128226 '<|reserved_special_token_221|>' is not marked as EOG\n",
56
+ "llm_load_vocab: control token: 128224 '<|reserved_special_token_219|>' is not marked as EOG\n",
57
+ "llm_load_vocab: control token: 128223 '<|reserved_special_token_218|>' is not marked as EOG\n",
58
+ "llm_load_vocab: control token: 128221 '<|reserved_special_token_216|>' is not marked as EOG\n",
59
+ "llm_load_vocab: control token: 128220 '<|reserved_special_token_215|>' is not marked as EOG\n",
60
+ "llm_load_vocab: control token: 128218 '<|reserved_special_token_213|>' is not marked as EOG\n",
61
+ "llm_load_vocab: control token: 128216 '<|reserved_special_token_211|>' is not marked as EOG\n",
62
+ "llm_load_vocab: control token: 128215 '<|reserved_special_token_210|>' is not marked as EOG\n",
63
+ "llm_load_vocab: control token: 128214 '<|reserved_special_token_209|>' is not marked as EOG\n",
64
+ "llm_load_vocab: control token: 128213 '<|reserved_special_token_208|>' is not marked as EOG\n",
65
+ "llm_load_vocab: control token: 128212 '<|reserved_special_token_207|>' is not marked as EOG\n",
66
+ "llm_load_vocab: control token: 128210 '<|reserved_special_token_205|>' is not marked as EOG\n",
67
+ "llm_load_vocab: control token: 128208 '<|reserved_special_token_203|>' is not marked as EOG\n",
68
+ "llm_load_vocab: control token: 128207 '<|reserved_special_token_202|>' is not marked as EOG\n",
69
+ "llm_load_vocab: control token: 128206 '<|reserved_special_token_201|>' is not marked as EOG\n",
70
+ "llm_load_vocab: control token: 128205 '<|reserved_special_token_200|>' is not marked as EOG\n",
71
+ "llm_load_vocab: control token: 128204 '<|reserved_special_token_199|>' is not marked as EOG\n",
72
+ "llm_load_vocab: control token: 128201 '<|reserved_special_token_196|>' is not marked as EOG\n",
73
+ "llm_load_vocab: control token: 128199 '<|reserved_special_token_194|>' is not marked as EOG\n",
74
+ "llm_load_vocab: control token: 128194 '<|reserved_special_token_189|>' is not marked as EOG\n",
75
+ "llm_load_vocab: control token: 128192 '<|reserved_special_token_187|>' is not marked as EOG\n",
76
+ "llm_load_vocab: control token: 128191 '<|reserved_special_token_186|>' is not marked as EOG\n",
77
+ "llm_load_vocab: control token: 128188 '<|reserved_special_token_183|>' is not marked as EOG\n",
78
+ "llm_load_vocab: control token: 128187 '<|reserved_special_token_182|>' is not marked as EOG\n",
79
+ "llm_load_vocab: control token: 128185 '<|reserved_special_token_180|>' is not marked as EOG\n",
80
+ "llm_load_vocab: control token: 128184 '<|reserved_special_token_179|>' is not marked as EOG\n",
81
+ "llm_load_vocab: control token: 128182 '<|reserved_special_token_177|>' is not marked as EOG\n",
82
+ "llm_load_vocab: control token: 128181 '<|reserved_special_token_176|>' is not marked as EOG\n",
83
+ "llm_load_vocab: control token: 128180 '<|reserved_special_token_175|>' is not marked as EOG\n",
84
+ "llm_load_vocab: control token: 128175 '<|reserved_special_token_170|>' is not marked as EOG\n",
85
+ "llm_load_vocab: control token: 128174 '<|reserved_special_token_169|>' is not marked as EOG\n",
86
+ "llm_load_vocab: control token: 128173 '<|reserved_special_token_168|>' is not marked as EOG\n",
87
+ "llm_load_vocab: control token: 128172 '<|reserved_special_token_167|>' is not marked as EOG\n",
88
+ "llm_load_vocab: control token: 128171 '<|reserved_special_token_166|>' is not marked as EOG\n",
89
+ "llm_load_vocab: control token: 128170 '<|reserved_special_token_165|>' is not marked as EOG\n",
90
+ "llm_load_vocab: control token: 128169 '<|reserved_special_token_164|>' is not marked as EOG\n",
91
+ "llm_load_vocab: control token: 128166 '<|reserved_special_token_161|>' is not marked as EOG\n",
92
+ "llm_load_vocab: control token: 128164 '<|reserved_special_token_159|>' is not marked as EOG\n",
93
+ "llm_load_vocab: control token: 128163 '<|reserved_special_token_158|>' is not marked as EOG\n",
94
+ "llm_load_vocab: control token: 128157 '<|reserved_special_token_152|>' is not marked as EOG\n",
95
+ "llm_load_vocab: control token: 128156 '<|reserved_special_token_151|>' is not marked as EOG\n",
96
+ "llm_load_vocab: control token: 128154 '<|reserved_special_token_149|>' is not marked as EOG\n",
97
+ "llm_load_vocab: control token: 128153 '<|reserved_special_token_148|>' is not marked as EOG\n",
98
+ "llm_load_vocab: control token: 128151 '<|reserved_special_token_146|>' is not marked as EOG\n",
99
+ "llm_load_vocab: control token: 128149 '<|reserved_special_token_144|>' is not marked as EOG\n",
100
+ "llm_load_vocab: control token: 128148 '<|reserved_special_token_143|>' is not marked as EOG\n",
101
+ "llm_load_vocab: control token: 128147 '<|reserved_special_token_142|>' is not marked as EOG\n",
102
+ "llm_load_vocab: control token: 128144 '<|reserved_special_token_139|>' is not marked as EOG\n",
103
+ "llm_load_vocab: control token: 128141 '<|reserved_special_token_136|>' is not marked as EOG\n",
104
+ "llm_load_vocab: control token: 128139 '<|reserved_special_token_134|>' is not marked as EOG\n",
105
+ "llm_load_vocab: control token: 128138 '<|reserved_special_token_133|>' is not marked as EOG\n",
106
+ "llm_load_vocab: control token: 128137 '<|reserved_special_token_132|>' is not marked as EOG\n",
107
+ "llm_load_vocab: control token: 128130 '<|reserved_special_token_125|>' is not marked as EOG\n",
108
+ "llm_load_vocab: control token: 128127 '<|reserved_special_token_122|>' is not marked as EOG\n",
109
+ "llm_load_vocab: control token: 128125 '<|reserved_special_token_120|>' is not marked as EOG\n",
110
+ "llm_load_vocab: control token: 128124 '<|reserved_special_token_119|>' is not marked as EOG\n",
111
+ "llm_load_vocab: control token: 128123 '<|reserved_special_token_118|>' is not marked as EOG\n",
112
+ "llm_load_vocab: control token: 128122 '<|reserved_special_token_117|>' is not marked as EOG\n",
113
+ "llm_load_vocab: control token: 128121 '<|reserved_special_token_116|>' is not marked as EOG\n",
114
+ "llm_load_vocab: control token: 128120 '<|reserved_special_token_115|>' is not marked as EOG\n",
115
+ "llm_load_vocab: control token: 128119 '<|reserved_special_token_114|>' is not marked as EOG\n",
116
+ "llm_load_vocab: control token: 128118 '<|reserved_special_token_113|>' is not marked as EOG\n",
117
+ "llm_load_vocab: control token: 128117 '<|reserved_special_token_112|>' is not marked as EOG\n",
118
+ "llm_load_vocab: control token: 128116 '<|reserved_special_token_111|>' is not marked as EOG\n",
119
+ "llm_load_vocab: control token: 128113 '<|reserved_special_token_108|>' is not marked as EOG\n",
120
+ "llm_load_vocab: control token: 128112 '<|reserved_special_token_107|>' is not marked as EOG\n",
121
+ "llm_load_vocab: control token: 128111 '<|reserved_special_token_106|>' is not marked as EOG\n",
122
+ "llm_load_vocab: control token: 128110 '<|reserved_special_token_105|>' is not marked as EOG\n",
123
+ "llm_load_vocab: control token: 128108 '<|reserved_special_token_103|>' is not marked as EOG\n",
124
+ "llm_load_vocab: control token: 128107 '<|reserved_special_token_102|>' is not marked as EOG\n",
125
+ "llm_load_vocab: control token: 128104 '<|reserved_special_token_99|>' is not marked as EOG\n",
126
+ "llm_load_vocab: control token: 128103 '<|reserved_special_token_98|>' is not marked as EOG\n",
127
+ "llm_load_vocab: control token: 128102 '<|reserved_special_token_97|>' is not marked as EOG\n",
128
+ "llm_load_vocab: control token: 128101 '<|reserved_special_token_96|>' is not marked as EOG\n",
129
+ "llm_load_vocab: control token: 128100 '<|reserved_special_token_95|>' is not marked as EOG\n",
130
+ "llm_load_vocab: control token: 128097 '<|reserved_special_token_92|>' is not marked as EOG\n",
131
+ "llm_load_vocab: control token: 128094 '<|reserved_special_token_89|>' is not marked as EOG\n",
132
+ "llm_load_vocab: control token: 128093 '<|reserved_special_token_88|>' is not marked as EOG\n",
133
+ "llm_load_vocab: control token: 128091 '<|reserved_special_token_86|>' is not marked as EOG\n",
134
+ "llm_load_vocab: control token: 128090 '<|reserved_special_token_85|>' is not marked as EOG\n",
135
+ "llm_load_vocab: control token: 128087 '<|reserved_special_token_82|>' is not marked as EOG\n",
136
+ "llm_load_vocab: control token: 128086 '<|reserved_special_token_81|>' is not marked as EOG\n",
137
+ "llm_load_vocab: control token: 128084 '<|reserved_special_token_79|>' is not marked as EOG\n",
138
+ "llm_load_vocab: control token: 128082 '<|reserved_special_token_77|>' is not marked as EOG\n",
139
+ "llm_load_vocab: control token: 128077 '<|reserved_special_token_72|>' is not marked as EOG\n",
140
+ "llm_load_vocab: control token: 128074 '<|reserved_special_token_69|>' is not marked as EOG\n",
141
+ "llm_load_vocab: control token: 128073 '<|reserved_special_token_68|>' is not marked as EOG\n",
142
+ "llm_load_vocab: control token: 128070 '<|reserved_special_token_65|>' is not marked as EOG\n",
143
+ "llm_load_vocab: control token: 128067 '<|reserved_special_token_62|>' is not marked as EOG\n",
144
+ "llm_load_vocab: control token: 128066 '<|reserved_special_token_61|>' is not marked as EOG\n",
145
+ "llm_load_vocab: control token: 128064 '<|reserved_special_token_59|>' is not marked as EOG\n",
146
+ "llm_load_vocab: control token: 128061 '<|reserved_special_token_56|>' is not marked as EOG\n",
147
+ "llm_load_vocab: control token: 128059 '<|reserved_special_token_54|>' is not marked as EOG\n",
148
+ "llm_load_vocab: control token: 128058 '<|reserved_special_token_53|>' is not marked as EOG\n",
149
+ "llm_load_vocab: control token: 128057 '<|reserved_special_token_52|>' is not marked as EOG\n",
150
+ "llm_load_vocab: control token: 128051 '<|reserved_special_token_46|>' is not marked as EOG\n",
151
+ "llm_load_vocab: control token: 128042 '<|reserved_special_token_37|>' is not marked as EOG\n",
152
+ "llm_load_vocab: control token: 128041 '<|reserved_special_token_36|>' is not marked as EOG\n",
153
+ "llm_load_vocab: control token: 128040 '<|reserved_special_token_35|>' is not marked as EOG\n",
154
+ "llm_load_vocab: control token: 128039 '<|reserved_special_token_34|>' is not marked as EOG\n",
155
+ "llm_load_vocab: control token: 128035 '<|reserved_special_token_30|>' is not marked as EOG\n",
156
+ "llm_load_vocab: control token: 128034 '<|reserved_special_token_29|>' is not marked as EOG\n",
157
+ "llm_load_vocab: control token: 128032 '<|reserved_special_token_27|>' is not marked as EOG\n",
158
+ "llm_load_vocab: control token: 128031 '<|reserved_special_token_26|>' is not marked as EOG\n",
159
+ "llm_load_vocab: control token: 128030 '<|reserved_special_token_25|>' is not marked as EOG\n",
160
+ "llm_load_vocab: control token: 128029 '<|reserved_special_token_24|>' is not marked as EOG\n",
161
+ "llm_load_vocab: control token: 128027 '<|reserved_special_token_22|>' is not marked as EOG\n",
162
+ "llm_load_vocab: control token: 128026 '<|reserved_special_token_21|>' is not marked as EOG\n",
163
+ "llm_load_vocab: control token: 128025 '<|reserved_special_token_20|>' is not marked as EOG\n",
164
+ "llm_load_vocab: control token: 128023 '<|reserved_special_token_18|>' is not marked as EOG\n",
165
+ "llm_load_vocab: control token: 128022 '<|reserved_special_token_17|>' is not marked as EOG\n",
166
+ "llm_load_vocab: control token: 128021 '<|reserved_special_token_16|>' is not marked as EOG\n",
167
+ "llm_load_vocab: control token: 128019 '<|reserved_special_token_14|>' is not marked as EOG\n",
168
+ "llm_load_vocab: control token: 128017 '<|reserved_special_token_12|>' is not marked as EOG\n",
169
+ "llm_load_vocab: control token: 128014 '<|reserved_special_token_9|>' is not marked as EOG\n",
170
+ "llm_load_vocab: control token: 128013 '<|reserved_special_token_8|>' is not marked as EOG\n",
171
+ "llm_load_vocab: control token: 128012 '<|reserved_special_token_7|>' is not marked as EOG\n",
172
+ "llm_load_vocab: control token: 128011 '<|reserved_special_token_6|>' is not marked as EOG\n",
173
+ "llm_load_vocab: control token: 128010 '<|reserved_special_token_5|>' is not marked as EOG\n",
174
+ "llm_load_vocab: control token: 128006 '<|start_header_id|>' is not marked as EOG\n",
175
+ "llm_load_vocab: control token: 128005 '<|reserved_special_token_3|>' is not marked as EOG\n",
176
+ "llm_load_vocab: control token: 128003 '<|reserved_special_token_1|>' is not marked as EOG\n",
177
+ "llm_load_vocab: control token: 128002 '<|reserved_special_token_0|>' is not marked as EOG\n",
178
+ "llm_load_vocab: control token: 128000 '<|begin_of_text|>' is not marked as EOG\n",
179
+ "llm_load_vocab: control token: 128038 '<|reserved_special_token_33|>' is not marked as EOG\n",
180
+ "llm_load_vocab: control token: 128060 '<|reserved_special_token_55|>' is not marked as EOG\n",
181
+ "llm_load_vocab: control token: 128043 '<|reserved_special_token_38|>' is not marked as EOG\n",
182
+ "llm_load_vocab: control token: 128007 '<|end_header_id|>' is not marked as EOG\n",
183
+ "llm_load_vocab: control token: 128062 '<|reserved_special_token_57|>' is not marked as EOG\n",
184
+ "llm_load_vocab: control token: 128168 '<|reserved_special_token_163|>' is not marked as EOG\n",
185
+ "llm_load_vocab: control token: 128159 '<|reserved_special_token_154|>' is not marked as EOG\n",
186
+ "llm_load_vocab: control token: 128162 '<|reserved_special_token_157|>' is not marked as EOG\n",
187
+ "llm_load_vocab: control token: 128054 '<|reserved_special_token_49|>' is not marked as EOG\n",
188
+ "llm_load_vocab: control token: 128047 '<|reserved_special_token_42|>' is not marked as EOG\n",
189
+ "llm_load_vocab: control token: 128053 '<|reserved_special_token_48|>' is not marked as EOG\n",
190
+ "llm_load_vocab: control token: 128227 '<|reserved_special_token_222|>' is not marked as EOG\n",
191
+ "llm_load_vocab: control token: 128095 '<|reserved_special_token_90|>' is not marked as EOG\n",
192
+ "llm_load_vocab: control token: 128150 '<|reserved_special_token_145|>' is not marked as EOG\n",
193
+ "llm_load_vocab: control token: 128081 '<|reserved_special_token_76|>' is not marked as EOG\n",
194
+ "llm_load_vocab: control token: 128079 '<|reserved_special_token_74|>' is not marked as EOG\n",
195
+ "llm_load_vocab: control token: 128099 '<|reserved_special_token_94|>' is not marked as EOG\n",
196
+ "llm_load_vocab: control token: 128250 '<|reserved_special_token_245|>' is not marked as EOG\n",
197
+ "llm_load_vocab: control token: 128176 '<|reserved_special_token_171|>' is not marked as EOG\n",
198
+ "llm_load_vocab: control token: 128068 '<|reserved_special_token_63|>' is not marked as EOG\n",
199
+ "llm_load_vocab: control token: 128132 '<|reserved_special_token_127|>' is not marked as EOG\n",
200
+ "llm_load_vocab: control token: 128158 '<|reserved_special_token_153|>' is not marked as EOG\n",
201
+ "llm_load_vocab: control token: 128161 '<|reserved_special_token_156|>' is not marked as EOG\n",
202
+ "llm_load_vocab: control token: 128131 '<|reserved_special_token_126|>' is not marked as EOG\n",
203
+ "llm_load_vocab: control token: 128246 '<|reserved_special_token_241|>' is not marked as EOG\n",
204
+ "llm_load_vocab: control token: 128254 '<|reserved_special_token_249|>' is not marked as EOG\n",
205
+ "llm_load_vocab: control token: 128033 '<|reserved_special_token_28|>' is not marked as EOG\n",
206
+ "llm_load_vocab: control token: 128145 '<|reserved_special_token_140|>' is not marked as EOG\n",
207
+ "llm_load_vocab: control token: 128178 '<|reserved_special_token_173|>' is not marked as EOG\n",
208
+ "llm_load_vocab: control token: 128219 '<|reserved_special_token_214|>' is not marked as EOG\n",
209
+ "llm_load_vocab: control token: 128072 '<|reserved_special_token_67|>' is not marked as EOG\n",
210
+ "llm_load_vocab: control token: 128238 '<|reserved_special_token_233|>' is not marked as EOG\n",
211
+ "llm_load_vocab: control token: 128048 '<|reserved_special_token_43|>' is not marked as EOG\n",
212
+ "llm_load_vocab: control token: 128065 '<|reserved_special_token_60|>' is not marked as EOG\n",
213
+ "llm_load_vocab: control token: 128146 '<|reserved_special_token_141|>' is not marked as EOG\n",
214
+ "llm_load_vocab: control token: 128198 '<|reserved_special_token_193|>' is not marked as EOG\n",
215
+ "llm_load_vocab: control token: 128055 '<|reserved_special_token_50|>' is not marked as EOG\n",
216
+ "llm_load_vocab: control token: 128143 '<|reserved_special_token_138|>' is not marked as EOG\n",
217
+ "llm_load_vocab: control token: 128140 '<|reserved_special_token_135|>' is not marked as EOG\n",
218
+ "llm_load_vocab: control token: 128020 '<|reserved_special_token_15|>' is not marked as EOG\n",
219
+ "llm_load_vocab: control token: 128036 '<|reserved_special_token_31|>' is not marked as EOG\n",
220
+ "llm_load_vocab: control token: 128129 '<|reserved_special_token_124|>' is not marked as EOG\n",
221
+ "llm_load_vocab: control token: 128098 '<|reserved_special_token_93|>' is not marked as EOG\n",
222
+ "llm_load_vocab: control token: 128209 '<|reserved_special_token_204|>' is not marked as EOG\n",
223
+ "llm_load_vocab: control token: 128186 '<|reserved_special_token_181|>' is not marked as EOG\n",
224
+ "llm_load_vocab: control token: 128222 '<|reserved_special_token_217|>' is not marked as EOG\n",
225
+ "llm_load_vocab: control token: 128126 '<|reserved_special_token_121|>' is not marked as EOG\n",
226
+ "llm_load_vocab: control token: 128004 '<|reserved_special_token_2|>' is not marked as EOG\n",
227
+ "llm_load_vocab: control token: 128075 '<|reserved_special_token_70|>' is not marked as EOG\n",
228
+ "llm_load_vocab: control token: 128160 '<|reserved_special_token_155|>' is not marked as EOG\n",
229
+ "llm_load_vocab: control token: 128069 '<|reserved_special_token_64|>' is not marked as EOG\n",
230
+ "llm_load_vocab: control token: 128109 '<|reserved_special_token_104|>' is not marked as EOG\n",
231
+ "llm_load_vocab: control token: 128183 '<|reserved_special_token_178|>' is not marked as EOG\n",
232
+ "llm_load_vocab: control token: 128092 '<|reserved_special_token_87|>' is not marked as EOG\n",
233
+ "llm_load_vocab: control token: 128106 '<|reserved_special_token_101|>' is not marked as EOG\n",
234
+ "llm_load_vocab: control token: 128096 '<|reserved_special_token_91|>' is not marked as EOG\n",
235
+ "llm_load_vocab: control token: 128135 '<|reserved_special_token_130|>' is not marked as EOG\n",
236
+ "llm_load_vocab: control token: 128190 '<|reserved_special_token_185|>' is not marked as EOG\n",
237
+ "llm_load_vocab: control token: 128196 '<|reserved_special_token_191|>' is not marked as EOG\n",
238
+ "llm_load_vocab: control token: 128045 '<|reserved_special_token_40|>' is not marked as EOG\n",
239
+ "llm_load_vocab: control token: 128085 '<|reserved_special_token_80|>' is not marked as EOG\n",
240
+ "llm_load_vocab: control token: 128189 '<|reserved_special_token_184|>' is not marked as EOG\n",
241
+ "llm_load_vocab: control token: 128133 '<|reserved_special_token_128|>' is not marked as EOG\n",
242
+ "llm_load_vocab: control token: 128089 '<|reserved_special_token_84|>' is not marked as EOG\n",
243
+ "llm_load_vocab: control token: 128155 '<|reserved_special_token_150|>' is not marked as EOG\n",
244
+ "llm_load_vocab: control token: 128001 '<|end_of_text|>' is not marked as EOG\n",
245
+ "llm_load_vocab: control token: 128046 '<|reserved_special_token_41|>' is not marked as EOG\n",
246
+ "llm_load_vocab: control token: 128028 '<|reserved_special_token_23|>' is not marked as EOG\n",
247
+ "llm_load_vocab: control token: 128252 '<|reserved_special_token_247|>' is not marked as EOG\n",
248
+ "llm_load_vocab: control token: 128179 '<|reserved_special_token_174|>' is not marked as EOG\n",
249
+ "llm_load_vocab: control token: 128063 '<|reserved_special_token_58|>' is not marked as EOG\n",
250
+ "llm_load_vocab: control token: 128177 '<|reserved_special_token_172|>' is not marked as EOG\n",
251
+ "llm_load_vocab: control token: 128230 '<|reserved_special_token_225|>' is not marked as EOG\n",
252
+ "llm_load_vocab: control token: 128076 '<|reserved_special_token_71|>' is not marked as EOG\n",
253
+ "llm_load_vocab: control token: 128078 '<|reserved_special_token_73|>' is not marked as EOG\n",
254
+ "llm_load_vocab: control token: 128228 '<|reserved_special_token_223|>' is not marked as EOG\n",
255
+ "llm_load_vocab: control token: 128193 '<|reserved_special_token_188|>' is not marked as EOG\n",
256
+ "llm_load_vocab: control token: 128044 '<|reserved_special_token_39|>' is not marked as EOG\n",
257
+ "llm_load_vocab: control token: 128080 '<|reserved_special_token_75|>' is not marked as EOG\n",
258
+ "llm_load_vocab: control token: 128136 '<|reserved_special_token_131|>' is not marked as EOG\n",
259
+ "llm_load_vocab: control token: 128128 '<|reserved_special_token_123|>' is not marked as EOG\n",
260
+ "llm_load_vocab: control token: 128115 '<|reserved_special_token_110|>' is not marked as EOG\n",
261
+ "llm_load_vocab: control token: 128050 '<|reserved_special_token_45|>' is not marked as EOG\n",
262
+ "llm_load_vocab: control token: 128217 '<|reserved_special_token_212|>' is not marked as EOG\n",
263
+ "llm_load_vocab: control token: 128105 '<|reserved_special_token_100|>' is not marked as EOG\n",
264
+ "llm_load_vocab: control token: 128088 '<|reserved_special_token_83|>' is not marked as EOG\n",
265
+ "llm_load_vocab: control token: 128200 '<|reserved_special_token_195|>' is not marked as EOG\n",
266
+ "llm_load_vocab: control token: 128056 '<|reserved_special_token_51|>' is not marked as EOG\n",
267
+ "llm_load_vocab: control token: 128016 '<|reserved_special_token_11|>' is not marked as EOG\n",
268
+ "llm_load_vocab: control token: 128167 '<|reserved_special_token_162|>' is not marked as EOG\n",
269
+ "llm_load_vocab: control token: 128202 '<|reserved_special_token_197|>' is not marked as EOG\n",
270
+ "llm_load_vocab: control token: 128037 '<|reserved_special_token_32|>' is not marked as EOG\n",
271
+ "llm_load_vocab: control token: 128197 '<|reserved_special_token_192|>' is not marked as EOG\n",
272
+ "llm_load_vocab: control token: 128233 '<|reserved_special_token_228|>' is not marked as EOG\n",
273
+ "llm_load_vocab: control token: 128142 '<|reserved_special_token_137|>' is not marked as EOG\n",
274
+ "llm_load_vocab: control token: 128165 '<|reserved_special_token_160|>' is not marked as EOG\n",
275
+ "llm_load_vocab: control token: 128211 '<|reserved_special_token_206|>' is not marked as EOG\n",
276
+ "llm_load_vocab: control token: 128134 '<|reserved_special_token_129|>' is not marked as EOG\n",
277
+ "llm_load_vocab: control token: 128229 '<|reserved_special_token_224|>' is not marked as EOG\n",
278
+ "llm_load_vocab: control token: 128236 '<|reserved_special_token_231|>' is not marked as EOG\n",
279
+ "llm_load_vocab: control token: 128052 '<|reserved_special_token_47|>' is not marked as EOG\n",
280
+ "llm_load_vocab: control token: 128225 '<|reserved_special_token_220|>' is not marked as EOG\n",
281
+ "llm_load_vocab: control token: 128203 '<|reserved_special_token_198|>' is not marked as EOG\n",
282
+ "llm_load_vocab: control token: 128015 '<|reserved_special_token_10|>' is not marked as EOG\n",
283
+ "llm_load_vocab: control token: 128008 '<|reserved_special_token_4|>' is not marked as EOG\n",
284
+ "llm_load_vocab: control token: 128195 '<|reserved_special_token_190|>' is not marked as EOG\n",
285
+ "llm_load_vocab: control token: 128018 '<|reserved_special_token_13|>' is not marked as EOG\n",
286
+ "llm_load_vocab: control token: 128083 '<|reserved_special_token_78|>' is not marked as EOG\n",
287
+ "llm_load_vocab: control token: 128071 '<|reserved_special_token_66|>' is not marked as EOG\n",
288
+ "llm_load_vocab: control token: 128024 '<|reserved_special_token_19|>' is not marked as EOG\n",
289
+ "llm_load_vocab: control token: 128239 '<|reserved_special_token_234|>' is not marked as EOG\n",
290
+ "llm_load_vocab: control token: 128152 '<|reserved_special_token_147|>' is not marked as EOG\n",
291
+ "llm_load_vocab: control token: 128049 '<|reserved_special_token_44|>' is not marked as EOG\n",
292
+ "llm_load_vocab: control token: 128243 '<|reserved_special_token_238|>' is not marked as EOG\n",
293
+ "llm_load_vocab: control token: 128114 '<|reserved_special_token_109|>' is not marked as EOG\n",
294
+ "llm_load_vocab: control token: 128234 '<|reserved_special_token_229|>' is not marked as EOG\n",
295
+ "llm_load_vocab: special tokens cache size = 256\n",
296
+ "llm_load_vocab: token to piece cache size = 0.8000 MB\n",
297
+ "llm_load_print_meta: format = GGUF V3 (latest)\n",
298
+ "llm_load_print_meta: arch = llama\n",
299
+ "llm_load_print_meta: vocab type = BPE\n",
300
+ "llm_load_print_meta: n_vocab = 128256\n",
301
+ "llm_load_print_meta: n_merges = 280147\n",
302
+ "llm_load_print_meta: vocab_only = 0\n",
303
+ "llm_load_print_meta: n_ctx_train = 8192\n",
304
+ "llm_load_print_meta: n_embd = 4096\n",
305
+ "llm_load_print_meta: n_layer = 32\n",
306
+ "llm_load_print_meta: n_head = 32\n",
307
+ "llm_load_print_meta: n_head_kv = 8\n",
308
+ "llm_load_print_meta: n_rot = 128\n",
309
+ "llm_load_print_meta: n_swa = 0\n",
310
+ "llm_load_print_meta: n_embd_head_k = 128\n",
311
+ "llm_load_print_meta: n_embd_head_v = 128\n",
312
+ "llm_load_print_meta: n_gqa = 4\n",
313
+ "llm_load_print_meta: n_embd_k_gqa = 1024\n",
314
+ "llm_load_print_meta: n_embd_v_gqa = 1024\n",
315
+ "llm_load_print_meta: f_norm_eps = 0.0e+00\n",
316
+ "llm_load_print_meta: f_norm_rms_eps = 1.0e-05\n",
317
+ "llm_load_print_meta: f_clamp_kqv = 0.0e+00\n",
318
+ "llm_load_print_meta: f_max_alibi_bias = 0.0e+00\n",
319
+ "llm_load_print_meta: f_logit_scale = 0.0e+00\n",
320
+ "llm_load_print_meta: n_ff = 14336\n",
321
+ "llm_load_print_meta: n_expert = 0\n",
322
+ "llm_load_print_meta: n_expert_used = 0\n",
323
+ "llm_load_print_meta: causal attn = 1\n",
324
+ "llm_load_print_meta: pooling type = 0\n",
325
+ "llm_load_print_meta: rope type = 0\n",
326
+ "llm_load_print_meta: rope scaling = linear\n",
327
+ "llm_load_print_meta: freq_base_train = 500000.0\n",
328
+ "llm_load_print_meta: freq_scale_train = 1\n",
329
+ "llm_load_print_meta: n_ctx_orig_yarn = 8192\n",
330
+ "llm_load_print_meta: rope_finetuned = unknown\n",
331
+ "llm_load_print_meta: ssm_d_conv = 0\n",
332
+ "llm_load_print_meta: ssm_d_inner = 0\n",
333
+ "llm_load_print_meta: ssm_d_state = 0\n",
334
+ "llm_load_print_meta: ssm_dt_rank = 0\n",
335
+ "llm_load_print_meta: ssm_dt_b_c_rms = 0\n",
336
+ "llm_load_print_meta: model type = 8B\n",
337
+ "llm_load_print_meta: model ftype = Q4_K - Medium\n",
338
+ "llm_load_print_meta: model params = 8.03 B\n",
339
+ "llm_load_print_meta: model size = 4.58 GiB (4.89 BPW) \n",
340
+ "llm_load_print_meta: general.name = Llama-3-8B-optimal-merged-stage2\n",
341
+ "llm_load_print_meta: BOS token = 128000 '<|begin_of_text|>'\n",
342
+ "llm_load_print_meta: EOS token = 128009 '<|eot_id|>'\n",
343
+ "llm_load_print_meta: EOT token = 128009 '<|eot_id|>'\n",
344
+ "llm_load_print_meta: LF token = 128 'Ä'\n",
345
+ "llm_load_print_meta: EOG token = 128009 '<|eot_id|>'\n",
346
+ "llm_load_print_meta: max token length = 256\n",
347
+ "llm_load_tensors: tensor 'token_embd.weight' (q4_K) (and 0 others) cannot be used with preferred buffer type CPU_AARCH64, using CPU instead\n",
348
+ "ggml_backend_metal_log_allocated_size: allocated buffer, size = 4096.00 MiB, ( 9584.69 / 5461.34)\n",
349
+ "ggml_backend_metal_log_allocated_size: warning: current allocated size is greater than the recommended max working set size\n",
350
+ "\n",
351
+ "ggml_backend_metal_log_allocated_size: allocated buffer, size = 1000.31 MiB, (10585.00 / 5461.34)\n",
352
+ "ggml_backend_metal_log_allocated_size: warning: current allocated size is greater than the recommended max working set size\n",
353
+ "llm_load_tensors: offloading 32 repeating layers to GPU\n",
354
+ "llm_load_tensors: offloading output layer to GPU\n",
355
+ "llm_load_tensors: offloaded 33/33 layers to GPU\n",
356
+ "llm_load_tensors: Metal_Mapped model buffer size = 4685.31 MiB\n",
357
+ "llm_load_tensors: CPU_Mapped model buffer size = 281.81 MiB\n",
358
+ ".......................................................................................\n",
359
+ "llama_new_context_with_model: n_seq_max = 1\n",
360
+ "llama_new_context_with_model: n_ctx = 512\n",
361
+ "llama_new_context_with_model: n_ctx_per_seq = 512\n",
362
+ "llama_new_context_with_model: n_batch = 128\n",
363
+ "llama_new_context_with_model: n_ubatch = 128\n",
364
+ "llama_new_context_with_model: flash_attn = 0\n",
365
+ "llama_new_context_with_model: freq_base = 500000.0\n",
366
+ "llama_new_context_with_model: freq_scale = 1\n",
367
+ "llama_new_context_with_model: n_ctx_per_seq (512) < n_ctx_train (8192) -- the full capacity of the model will not be utilized\n",
368
+ "ggml_metal_init: allocating\n",
369
+ "ggml_metal_init: found device: Apple M1\n",
370
+ "ggml_metal_init: picking default device: Apple M1\n",
371
+ "ggml_metal_init: using embedded metal library\n",
372
+ "ggml_metal_init: GPU name: Apple M1\n",
373
+ "ggml_metal_init: GPU family: MTLGPUFamilyApple7 (1007)\n",
374
+ "ggml_metal_init: GPU family: MTLGPUFamilyCommon3 (3003)\n",
375
+ "ggml_metal_init: GPU family: MTLGPUFamilyMetal3 (5001)\n",
376
+ "ggml_metal_init: simdgroup reduction = true\n",
377
+ "ggml_metal_init: simdgroup matrix mul. = true\n",
378
+ "ggml_metal_init: has bfloat = true\n",
379
+ "ggml_metal_init: use bfloat = false\n",
380
+ "ggml_metal_init: hasUnifiedMemory = true\n",
381
+ "ggml_metal_init: recommendedMaxWorkingSetSize = 5726.63 MB\n",
382
+ "ggml_metal_init: loaded kernel_add 0x10487d590 | th_max = 1024 | th_width = 32\n",
383
+ "ggml_metal_init: loaded kernel_add_row 0x104f3a120 | th_max = 1024 | th_width = 32\n",
384
+ "ggml_metal_init: loaded kernel_sub 0x1051058f0 | th_max = 1024 | th_width = 32\n",
385
+ "ggml_metal_init: loaded kernel_sub_row 0x104f9fcc0 | th_max = 1024 | th_width = 32\n",
386
+ "ggml_metal_init: loaded kernel_mul 0x104f9ff20 | th_max = 1024 | th_width = 32\n",
387
+ "ggml_metal_init: loaded kernel_mul_row 0x104fa0500 | th_max = 1024 | th_width = 32\n",
388
+ "ggml_metal_init: loaded kernel_div 0x10487d7f0 | th_max = 1024 | th_width = 32\n",
389
+ "ggml_metal_init: loaded kernel_div_row 0x10487ddd0 | th_max = 1024 | th_width = 32\n",
390
+ "ggml_metal_init: loaded kernel_repeat_f32 0x10487e030 | th_max = 1024 | th_width = 32\n",
391
+ "ggml_metal_init: loaded kernel_repeat_f16 0x104fa0760 | th_max = 1024 | th_width = 32\n",
392
+ "ggml_metal_init: loaded kernel_repeat_i32 0x10487e290 | th_max = 1024 | th_width = 32\n",
393
+ "ggml_metal_init: loaded kernel_repeat_i16 0x10487e570 | th_max = 1024 | th_width = 32\n",
394
+ "ggml_metal_init: loaded kernel_scale 0x10487ef10 | th_max = 1024 | th_width = 32\n",
395
+ "ggml_metal_init: loaded kernel_scale_4 0x10487f590 | th_max = 1024 | th_width = 32\n",
396
+ "ggml_metal_init: loaded kernel_clamp 0x104fa0d30 | th_max = 1024 | th_width = 32\n",
397
+ "ggml_metal_init: loaded kernel_tanh 0x104fa1210 | th_max = 1024 | th_width = 32\n",
398
+ "ggml_metal_init: loaded kernel_relu 0x104fa16f0 | th_max = 1024 | th_width = 32\n",
399
+ "ggml_metal_init: loaded kernel_sigmoid 0x1027cf550 | th_max = 1024 | th_width = 32\n",
400
+ "ggml_metal_init: loaded kernel_gelu 0x1027cfb00 | th_max = 1024 | th_width = 32\n",
401
+ "ggml_metal_init: loaded kernel_gelu_4 0x10511da30 | th_max = 1024 | th_width = 32\n",
402
+ "ggml_metal_init: loaded kernel_gelu_quick 0x104fa1bd0 | th_max = 1024 | th_width = 32\n",
403
+ "ggml_metal_init: loaded kernel_gelu_quick_4 0x10511df10 | th_max = 1024 | th_width = 32\n",
404
+ "ggml_metal_init: loaded kernel_silu 0x10511e3f0 | th_max = 1024 | th_width = 32\n",
405
+ "ggml_metal_init: loaded kernel_silu_4 0x1027d05f0 | th_max = 1024 | th_width = 32\n",
406
+ "ggml_metal_init: loaded kernel_soft_max_f16 0x104fa1e30 | th_max = 1024 | th_width = 32\n",
407
+ "ggml_metal_init: loaded kernel_soft_max_f16_4 0x104fa2090 | th_max = 1024 | th_width = 32\n",
408
+ "ggml_metal_init: loaded kernel_soft_max_f32 0x10487fde0 | th_max = 1024 | th_width = 32\n",
409
+ "ggml_metal_init: loaded kernel_soft_max_f32_4 0x10511e8d0 | th_max = 1024 | th_width = 32\n",
410
+ "ggml_metal_init: loaded kernel_diag_mask_inf 0x10511f1a0 | th_max = 1024 | th_width = 32\n",
411
+ "ggml_metal_init: loaded kernel_diag_mask_inf_8 0x104fa22f0 | th_max = 1024 | th_width = 32\n",
412
+ "ggml_metal_init: loaded kernel_get_rows_f32 0x104fa2550 | th_max = 1024 | th_width = 32\n",
413
+ "ggml_metal_init: loaded kernel_get_rows_f16 0x104880720 | th_max = 1024 | th_width = 32\n",
414
+ "ggml_metal_init: skipping kernel_get_rows_bf16 (not supported)\n",
415
+ "ggml_metal_init: loaded kernel_get_rows_q4_0 0x105104080 | th_max = 1024 | th_width = 32\n",
416
+ "ggml_metal_init: loaded kernel_get_rows_q4_1 0x104880cb0 | th_max = 1024 | th_width = 32\n",
417
+ "ggml_metal_init: loaded kernel_get_rows_q5_0 0x1027d12a0 | th_max = 1024 | th_width = 32\n",
418
+ "ggml_metal_init: loaded kernel_get_rows_q5_1 0x1027d1b90 | th_max = 1024 | th_width = 32\n",
419
+ "ggml_metal_init: loaded kernel_get_rows_q8_0 0x1048815d0 | th_max = 1024 | th_width = 32\n",
420
+ "ggml_metal_init: loaded kernel_get_rows_q2_K 0x1027d24e0 | th_max = 1024 | th_width = 32\n",
421
+ "ggml_metal_init: loaded kernel_get_rows_q3_K 0x1027d2e00 | th_max = 1024 | th_width = 32\n",
422
+ "ggml_metal_init: loaded kernel_get_rows_q4_K 0x104881f30 | th_max = 1024 | th_width = 32\n",
423
+ "ggml_metal_init: loaded kernel_get_rows_q5_K 0x1048825d0 | th_max = 1024 | th_width = 32\n",
424
+ "ggml_metal_init: loaded kernel_get_rows_q6_K 0x104fa27b0 | th_max = 1024 | th_width = 32\n",
425
+ "ggml_metal_init: loaded kernel_get_rows_iq2_xxs 0x1027d34e0 | th_max = 1024 | th_width = 32\n",
426
+ "ggml_metal_init: loaded kernel_get_rows_iq2_xs 0x1048829e0 | th_max = 1024 | th_width = 32\n",
427
+ "ggml_metal_init: loaded kernel_get_rows_iq3_xxs 0x105120270 | th_max = 1024 | th_width = 32\n",
428
+ "ggml_metal_init: loaded kernel_get_rows_iq3_s 0x1051204d0 | th_max = 1024 | th_width = 32\n",
429
+ "ggml_metal_init: loaded kernel_get_rows_iq2_s 0x104fa2a10 | th_max = 1024 | th_width = 32\n",
430
+ "ggml_metal_init: loaded kernel_get_rows_iq1_s 0x1027d3740 | th_max = 1024 | th_width = 32\n",
431
+ "ggml_metal_init: loaded kernel_get_rows_iq1_m 0x104fa2c70 | th_max = 1024 | th_width = 32\n",
432
+ "ggml_metal_init: loaded kernel_get_rows_iq4_nl 0x104fa2ed0 | th_max = 1024 | th_width = 32\n",
433
+ "ggml_metal_init: loaded kernel_get_rows_iq4_xs 0x105120d20 | th_max = 1024 | th_width = 32\n",
434
+ "ggml_metal_init: loaded kernel_get_rows_i32 0x104fa3130 | th_max = 1024 | th_width = 32\n",
435
+ "ggml_metal_init: loaded kernel_rms_norm 0x104883500 | th_max = 1024 | th_width = 32\n",
436
+ "ggml_metal_init: loaded kernel_group_norm 0x104fa3390 | th_max = 1024 | th_width = 32\n",
437
+ "ggml_metal_init: loaded kernel_norm 0x104fa35f0 | th_max = 1024 | th_width = 32\n",
438
+ "ggml_metal_init: loaded kernel_ssm_conv_f32 0x1051209e0 | th_max = 1024 | th_width = 32\n",
439
+ "ggml_metal_init: loaded kernel_ssm_scan_f32 0x104884090 | th_max = 1024 | th_width = 32\n",
440
+ "ggml_metal_init: loaded kernel_mul_mv_f32_f32 0x1048842f0 | th_max = 1024 | th_width = 32\n",
441
+ "ggml_metal_init: skipping kernel_mul_mv_bf16_f32 (not supported)\n",
442
+ "ggml_metal_init: skipping kernel_mul_mv_bf16_f32_1row (not supported)\n",
443
+ "ggml_metal_init: skipping kernel_mul_mv_bf16_f32_l4 (not supported)\n",
444
+ "ggml_metal_init: skipping kernel_mul_mv_bf16_bf16 (not supported)\n",
445
+ "ggml_metal_init: loaded kernel_mul_mv_f16_f32 0x1048847b0 | th_max = 1024 | th_width = 32\n",
446
+ "ggml_metal_init: loaded kernel_mul_mv_f16_f32_1row 0x104885180 | th_max = 1024 | th_width = 32\n",
447
+ "ggml_metal_init: loaded kernel_mul_mv_f16_f32_l4 0x104fa3850 | th_max = 1024 | th_width = 32\n",
448
+ "ggml_metal_init: loaded kernel_mul_mv_f16_f16 0x104885ac0 | th_max = 1024 | th_width = 32\n",
449
+ "ggml_metal_init: loaded kernel_mul_mv_q4_0_f32 0x104886480 | th_max = 640 | th_width = 32\n",
450
+ "ggml_metal_init: loaded kernel_mul_mv_q4_1_f32 0x1027d3e70 | th_max = 832 | th_width = 32\n",
451
+ "ggml_metal_init: loaded kernel_mul_mv_q5_0_f32 0x104fa3b30 | th_max = 640 | th_width = 32\n",
452
+ "ggml_metal_init: loaded kernel_mul_mv_q5_1_f32 0x104886f70 | th_max = 576 | th_width = 32\n",
453
+ "ggml_metal_init: loaded kernel_mul_mv_q8_0_f32 0x104fa3d90 | th_max = 1024 | th_width = 32\n",
454
+ "ggml_metal_init: loaded kernel_mul_mv_q2_K_f32 0x1051223c0 | th_max = 640 | th_width = 32\n",
455
+ "ggml_metal_init: loaded kernel_mul_mv_q3_K_f32 0x1051227c0 | th_max = 576 | th_width = 32\n",
456
+ "ggml_metal_init: loaded kernel_mul_mv_q4_K_f32 0x105123850 | th_max = 576 | th_width = 32\n",
457
+ "ggml_metal_init: loaded kernel_mul_mv_q5_K_f32 0x1027d40d0 | th_max = 576 | th_width = 32\n",
458
+ "ggml_metal_init: loaded kernel_mul_mv_q6_K_f32 0x1027d4330 | th_max = 1024 | th_width = 32\n",
459
+ "ggml_metal_init: loaded kernel_mul_mv_iq2_xxs_f32 0x104fa45c0 | th_max = 832 | th_width = 32\n",
460
+ "ggml_metal_init: loaded kernel_mul_mv_iq2_xs_f32 0x104887b70 | th_max = 704 | th_width = 32\n",
461
+ "ggml_metal_init: loaded kernel_mul_mv_iq3_xxs_f32 0x104888900 | th_max = 832 | th_width = 32\n",
462
+ "ggml_metal_init: loaded kernel_mul_mv_iq3_s_f32 0x104fa4820 | th_max = 640 | th_width = 32\n",
463
+ "ggml_metal_init: loaded kernel_mul_mv_iq2_s_f32 0x1027d4870 | th_max = 704 | th_width = 32\n",
464
+ "ggml_metal_init: loaded kernel_mul_mv_iq1_s_f32 0x104889360 | th_max = 448 | th_width = 32\n",
465
+ "ggml_metal_init: loaded kernel_mul_mv_iq1_m_f32 0x1027d5710 | th_max = 576 | th_width = 32\n",
466
+ "ggml_metal_init: loaded kernel_mul_mv_iq4_nl_f32 0x104fa4dc0 | th_max = 1024 | th_width = 32\n",
467
+ "ggml_metal_init: loaded kernel_mul_mv_iq4_xs_f32 0x104889770 | th_max = 1024 | th_width = 32\n",
468
+ "ggml_metal_init: loaded kernel_mul_mv_id_f32_f32 0x10488a040 | th_max = 1024 | th_width = 32\n",
469
+ "ggml_metal_init: loaded kernel_mul_mv_id_f16_f32 0x10484e1a0 | th_max = 1024 | th_width = 32\n",
470
+ "ggml_metal_init: skipping kernel_mul_mv_id_bf16_f32 (not supported)\n",
471
+ "ggml_metal_init: loaded kernel_mul_mv_id_q4_0_f32 0x1027d5970 | th_max = 832 | th_width = 32\n",
472
+ "ggml_metal_init: loaded kernel_mul_mv_id_q4_1_f32 0x1027d60c0 | th_max = 768 | th_width = 32\n",
473
+ "ggml_metal_init: loaded kernel_mul_mv_id_q5_0_f32 0x104889c40 | th_max = 576 | th_width = 32\n",
474
+ "ggml_metal_init: loaded kernel_mul_mv_id_q5_1_f32 0x1027d68f0 | th_max = 576 | th_width = 32\n",
475
+ "ggml_metal_init: loaded kernel_mul_mv_id_q8_0_f32 0x10487ced0 | th_max = 896 | th_width = 32\n",
476
+ "ggml_metal_init: loaded kernel_mul_mv_id_q2_K_f32 0x10487d2b0 | th_max = 576 | th_width = 32\n",
477
+ "ggml_metal_init: loaded kernel_mul_mv_id_q3_K_f32 0x10488b2f0 | th_max = 576 | th_width = 32\n",
478
+ "ggml_metal_init: loaded kernel_mul_mv_id_q4_K_f32 0x10488bce0 | th_max = 576 | th_width = 32\n",
479
+ "ggml_metal_init: loaded kernel_mul_mv_id_q5_K_f32 0x10488bf80 | th_max = 576 | th_width = 32\n",
480
+ "ggml_metal_init: loaded kernel_mul_mv_id_q6_K_f32 0x1027d70c0 | th_max = 1024 | th_width = 32\n",
481
+ "ggml_metal_init: loaded kernel_mul_mv_id_iq2_xxs_f32 0x1027d7890 | th_max = 768 | th_width = 32\n",
482
+ "ggml_metal_init: loaded kernel_mul_mv_id_iq2_xs_f32 0x10488d050 | th_max = 640 | th_width = 32\n",
483
+ "ggml_metal_init: loaded kernel_mul_mv_id_iq3_xxs_f32 0x10488da40 | th_max = 768 | th_width = 32\n",
484
+ "ggml_metal_init: loaded kernel_mul_mv_id_iq3_s_f32 0x10488dce0 | th_max = 640 | th_width = 32\n",
485
+ "ggml_metal_init: loaded kernel_mul_mv_id_iq2_s_f32 0x10488e560 | th_max = 640 | th_width = 32\n",
486
+ "ggml_metal_init: loaded kernel_mul_mv_id_iq1_s_f32 0x10488f760 | th_max = 448 | th_width = 32\n",
487
+ "ggml_metal_init: loaded kernel_mul_mv_id_iq1_m_f32 0x10488fe30 | th_max = 576 | th_width = 32\n",
488
+ "ggml_metal_init: loaded kernel_mul_mv_id_iq4_nl_f32 0x1051094b0 | th_max = 1024 | th_width = 32\n",
489
+ "ggml_metal_init: loaded kernel_mul_mv_id_iq4_xs_f32 0x104fa5e70 | th_max = 896 | th_width = 32\n",
490
+ "ggml_metal_init: loaded kernel_mul_mm_f32_f32 0x104891150 | th_max = 768 | th_width = 32\n",
491
+ "ggml_metal_init: loaded kernel_mul_mm_f16_f32 0x104fa63b0 | th_max = 768 | th_width = 32\n",
492
+ "ggml_metal_init: skipping kernel_mul_mm_bf16_f32 (not supported)\n",
493
+ "ggml_metal_init: loaded kernel_mul_mm_q4_0_f32 0x1051197d0 | th_max = 768 | th_width = 32\n",
494
+ "ggml_metal_init: loaded kernel_mul_mm_q4_1_f32 0x104fa6eb0 | th_max = 768 | th_width = 32\n",
495
+ "ggml_metal_init: loaded kernel_mul_mm_q5_0_f32 0x105124b10 | th_max = 768 | th_width = 32\n",
496
+ "ggml_metal_init: loaded kernel_mul_mm_q5_1_f32 0x104fa77d0 | th_max = 704 | th_width = 32\n",
497
+ "ggml_metal_init: loaded kernel_mul_mm_q8_0_f32 0x1051258c0 | th_max = 768 | th_width = 32\n",
498
+ "ggml_metal_init: loaded kernel_mul_mm_q2_K_f32 0x104fa8150 | th_max = 768 | th_width = 32\n",
499
+ "ggml_metal_init: loaded kernel_mul_mm_q3_K_f32 0x105125e20 | th_max = 768 | th_width = 32\n",
500
+ "ggml_metal_init: loaded kernel_mul_mm_q4_K_f32 0x104891680 | th_max = 768 | th_width = 32\n",
501
+ "ggml_metal_init: loaded kernel_mul_mm_q5_K_f32 0x1048918e0 | th_max = 768 | th_width = 32\n",
502
+ "ggml_metal_init: loaded kernel_mul_mm_q6_K_f32 0x104fa8aa0 | th_max = 768 | th_width = 32\n",
503
+ "ggml_metal_init: loaded kernel_mul_mm_iq2_xxs_f32 0x104892440 | th_max = 704 | th_width = 32\n",
504
+ "ggml_metal_init: loaded kernel_mul_mm_iq2_xs_f32 0x104892d10 | th_max = 768 | th_width = 32\n",
505
+ "ggml_metal_init: loaded kernel_mul_mm_iq3_xxs_f32 0x104fa93f0 | th_max = 768 | th_width = 32\n",
506
+ "ggml_metal_init: loaded kernel_mul_mm_iq3_s_f32 0x104faa460 | th_max = 768 | th_width = 32\n",
507
+ "ggml_metal_init: loaded kernel_mul_mm_iq2_s_f32 0x104fa6b70 | th_max = 768 | th_width = 32\n",
508
+ "ggml_metal_init: loaded kernel_mul_mm_iq1_s_f32 0x104893ea0 | th_max = 768 | th_width = 32\n",
509
+ "ggml_metal_init: loaded kernel_mul_mm_iq1_m_f32 0x104894ad0 | th_max = 768 | th_width = 32\n",
510
+ "ggml_metal_init: loaded kernel_mul_mm_iq4_nl_f32 0x104895370 | th_max = 768 | th_width = 32\n",
511
+ "ggml_metal_init: loaded kernel_mul_mm_iq4_xs_f32 0x104faaea0 | th_max = 768 | th_width = 32\n",
512
+ "ggml_metal_init: loaded kernel_mul_mm_id_f32_f32 0x1027d7f80 | th_max = 832 | th_width = 32\n",
513
+ "ggml_metal_init: loaded kernel_mul_mm_id_f16_f32 0x104895940 | th_max = 1024 | th_width = 32\n",
514
+ "ggml_metal_init: skipping kernel_mul_mm_id_bf16_f32 (not supported)\n",
515
+ "ggml_metal_init: loaded kernel_mul_mm_id_q4_0_f32 0x1027d8990 | th_max = 896 | th_width = 32\n",
516
+ "ggml_metal_init: loaded kernel_mul_mm_id_q4_1_f32 0x104fab100 | th_max = 896 | th_width = 32\n",
517
+ "ggml_metal_init: loaded kernel_mul_mm_id_q5_0_f32 0x104896580 | th_max = 768 | th_width = 32\n",
518
+ "ggml_metal_init: loaded kernel_mul_mm_id_q5_1_f32 0x104896d30 | th_max = 768 | th_width = 32\n",
519
+ "ggml_metal_init: loaded kernel_mul_mm_id_q8_0_f32 0x104fac3f0 | th_max = 1024 | th_width = 32\n",
520
+ "ggml_metal_init: loaded kernel_mul_mm_id_q2_K_f32 0x104897390 | th_max = 896 | th_width = 32\n",
521
+ "ggml_metal_init: loaded kernel_mul_mm_id_q3_K_f32 0x105126820 | th_max = 832 | th_width = 32\n",
522
+ "ggml_metal_init: loaded kernel_mul_mm_id_q4_K_f32 0x104897f40 | th_max = 896 | th_width = 32\n",
523
+ "ggml_metal_init: loaded kernel_mul_mm_id_q5_K_f32 0x104fad540 | th_max = 768 | th_width = 32\n",
524
+ "ggml_metal_init: loaded kernel_mul_mm_id_q6_K_f32 0x105127c70 | th_max = 832 | th_width = 32\n",
525
+ "ggml_metal_init: loaded kernel_mul_mm_id_iq2_xxs_f32 0x105127ed0 | th_max = 896 | th_width = 32\n",
526
+ "ggml_metal_init: loaded kernel_mul_mm_id_iq2_xs_f32 0x104fae170 | th_max = 1024 | th_width = 32\n",
527
+ "ggml_metal_init: loaded kernel_mul_mm_id_iq3_xxs_f32 0x1048988a0 | th_max = 1024 | th_width = 32\n",
528
+ "ggml_metal_init: loaded kernel_mul_mm_id_iq3_s_f32 0x104899420 | th_max = 1024 | th_width = 32\n",
529
+ "ggml_metal_init: loaded kernel_mul_mm_id_iq2_s_f32 0x105128790 | th_max = 896 | th_width = 32\n",
530
+ "ggml_metal_init: loaded kernel_mul_mm_id_iq1_s_f32 0x104899680 | th_max = 1024 | th_width = 32\n",
531
+ "ggml_metal_init: loaded kernel_mul_mm_id_iq1_m_f32 0x104fae3d0 | th_max = 1024 | th_width = 32\n",
532
+ "ggml_metal_init: loaded kernel_mul_mm_id_iq4_nl_f32 0x104faecb0 | th_max = 896 | th_width = 32\n",
533
+ "ggml_metal_init: loaded kernel_mul_mm_id_iq4_xs_f32 0x104fafaf0 | th_max = 832 | th_width = 32\n",
534
+ "ggml_metal_init: loaded kernel_rope_norm_f32 0x10489a0f0 | th_max = 1024 | th_width = 32\n",
535
+ "ggml_metal_init: loaded kernel_rope_norm_f16 0x104fb0c20 | th_max = 1024 | th_width = 32\n",
536
+ "ggml_metal_init: loaded kernel_rope_neox_f32 0x1027d9570 | th_max = 1024 | th_width = 32\n",
537
+ "ggml_metal_init: loaded kernel_rope_neox_f16 0x104faf820 | th_max = 1024 | th_width = 32\n",
538
+ "ggml_metal_init: loaded kernel_im2col_f16 0x104fb03a0 | th_max = 1024 | th_width = 32\n",
539
+ "ggml_metal_init: loaded kernel_im2col_f32 0x104fb1800 | th_max = 1024 | th_width = 32\n",
540
+ "ggml_metal_init: loaded kernel_im2col_ext_f16 0x104fb1f40 | th_max = 1024 | th_width = 32\n",
541
+ "ggml_metal_init: loaded kernel_im2col_ext_f32 0x104fb28f0 | th_max = 1024 | th_width = 32\n",
542
+ "ggml_metal_init: loaded kernel_upscale_f32 0x104fb2b50 | th_max = 1024 | th_width = 32\n",
543
+ "ggml_metal_init: loaded kernel_pad_f32 0x104fb3c00 | th_max = 1024 | th_width = 32\n",
544
+ "ggml_metal_init: loaded kernel_timestep_embedding_f32 0x10489c290 | th_max = 1024 | th_width = 32\n",
545
+ "ggml_metal_init: loaded kernel_arange_f32 0x10489b320 | th_max = 1024 | th_width = 32\n",
546
+ "ggml_metal_init: loaded kernel_argsort_f32_i32_asc 0x1027da260 | th_max = 1024 | th_width = 32\n",
547
+ "ggml_metal_init: loaded kernel_argsort_f32_i32_desc 0x10489d220 | th_max = 1024 | th_width = 32\n",
548
+ "ggml_metal_init: loaded kernel_leaky_relu_f32 0x10489e810 | th_max = 1024 | th_width = 32\n",
549
+ "ggml_metal_init: loaded kernel_flash_attn_ext_f16_h64 0x104fb52e0 | th_max = 704 | th_width = 32\n",
550
+ "ggml_metal_init: loaded kernel_flash_attn_ext_f16_h80 0x105128f30 | th_max = 640 | th_width = 32\n",
551
+ "ggml_metal_init: loaded kernel_flash_attn_ext_f16_h96 0x10489eec0 | th_max = 576 | th_width = 32\n",
552
+ "ggml_metal_init: loaded kernel_flash_attn_ext_f16_h112 0x10489f120 | th_max = 576 | th_width = 32\n",
553
+ "ggml_metal_init: loaded kernel_flash_attn_ext_f16_h128 0x105129190 | th_max = 512 | th_width = 32\n",
554
+ "ggml_metal_init: loaded kernel_flash_attn_ext_f16_h256 0x104fb4ab0 | th_max = 512 | th_width = 32\n",
555
+ "ggml_metal_init: skipping kernel_flash_attn_ext_bf16_h64 (not supported)\n",
556
+ "ggml_metal_init: skipping kernel_flash_attn_ext_bf16_h80 (not supported)\n",
557
+ "ggml_metal_init: skipping kernel_flash_attn_ext_bf16_h96 (not supported)\n",
558
+ "ggml_metal_init: skipping kernel_flash_attn_ext_bf16_h112 (not supported)\n",
559
+ "ggml_metal_init: skipping kernel_flash_attn_ext_bf16_h128 (not supported)\n",
560
+ "ggml_metal_init: skipping kernel_flash_attn_ext_bf16_h256 (not supported)\n",
561
+ "ggml_metal_init: loaded kernel_flash_attn_ext_q4_0_h64 0x1027daa80 | th_max = 704 | th_width = 32\n",
562
+ "ggml_metal_init: loaded kernel_flash_attn_ext_q4_0_h80 0x10489f900 | th_max = 1024 | th_width = 32\n",
563
+ "ggml_metal_init: loaded kernel_flash_attn_ext_q4_0_h96 0x1027db170 | th_max = 896 | th_width = 32\n",
564
+ "ggml_metal_init: loaded kernel_flash_attn_ext_q4_0_h112 0x104fb6600 | th_max = 896 | th_width = 32\n",
565
+ "ggml_metal_init: loaded kernel_flash_attn_ext_q4_0_h128 0x1027db840 | th_max = 832 | th_width = 32\n",
566
+ "ggml_metal_init: loaded kernel_flash_attn_ext_q4_0_h256 0x10512a0f0 | th_max = 832 | th_width = 32\n",
567
+ "ggml_metal_init: loaded kernel_flash_attn_ext_q4_1_h64 0x1048a0630 | th_max = 768 | th_width = 32\n",
568
+ "ggml_metal_init: loaded kernel_flash_attn_ext_q4_1_h80 0x1048a1040 | th_max = 1024 | th_width = 32\n",
569
+ "ggml_metal_init: loaded kernel_flash_attn_ext_q4_1_h96 0x10512aa50 | th_max = 896 | th_width = 32\n",
570
+ "ggml_metal_init: loaded kernel_flash_attn_ext_q4_1_h112 0x10512b1e0 | th_max = 896 | th_width = 32\n",
571
+ "ggml_metal_init: loaded kernel_flash_attn_ext_q4_1_h128 0x10512bb40 | th_max = 832 | th_width = 32\n",
572
+ "ggml_metal_init: loaded kernel_flash_attn_ext_q4_1_h256 0x10512c2a0 | th_max = 832 | th_width = 32\n",
573
+ "ggml_metal_init: loaded kernel_flash_attn_ext_q5_0_h64 0x10512cc30 | th_max = 576 | th_width = 32\n",
574
+ "ggml_metal_init: loaded kernel_flash_attn_ext_q5_0_h80 0x1048a1db0 | th_max = 832 | th_width = 32\n",
575
+ "ggml_metal_init: loaded kernel_flash_attn_ext_q5_0_h96 0x10512d6f0 | th_max = 832 | th_width = 32\n",
576
+ "ggml_metal_init: loaded kernel_flash_attn_ext_q5_0_h112 0x104fb7020 | th_max = 832 | th_width = 32\n",
577
+ "ggml_metal_init: loaded kernel_flash_attn_ext_q5_0_h128 0x10512dd90 | th_max = 832 | th_width = 32\n",
578
+ "ggml_metal_init: loaded kernel_flash_attn_ext_q5_0_h256 0x10512dff0 | th_max = 832 | th_width = 32\n",
579
+ "ggml_metal_init: loaded kernel_flash_attn_ext_q5_1_h64 0x104fb5c10 | th_max = 576 | th_width = 32\n",
580
+ "ggml_metal_init: loaded kernel_flash_attn_ext_q5_1_h80 0x1048a28b0 | th_max = 832 | th_width = 32\n",
581
+ "ggml_metal_init: loaded kernel_flash_attn_ext_q5_1_h96 0x104fb7830 | th_max = 832 | th_width = 32\n",
582
+ "ggml_metal_init: loaded kernel_flash_attn_ext_q5_1_h112 0x10512e890 | th_max = 832 | th_width = 32\n",
+ "ggml_metal_init: loaded kernel_flash_attn_ext_q5_1_h128 0x1048a3e00 | th_max = 768 | th_width = 32\n",
+ "ggml_metal_init: loaded kernel_flash_attn_ext_q5_1_h256 0x133605d80 | th_max = 768 | th_width = 32\n",
+ "ggml_metal_init: loaded kernel_flash_attn_ext_q8_0_h64 0x10512f640 | th_max = 768 | th_width = 32\n",
+ "ggml_metal_init: loaded kernel_flash_attn_ext_q8_0_h80 0x1048a4590 | th_max = 1024 | th_width = 32\n",
+ "ggml_metal_init: loaded kernel_flash_attn_ext_q8_0_h96 0x105130060 | th_max = 1024 | th_width = 32\n",
+ "ggml_metal_init: loaded kernel_flash_attn_ext_q8_0_h112 0x1027dbb00 | th_max = 1024 | th_width = 32\n",
+ "ggml_metal_init: loaded kernel_flash_attn_ext_q8_0_h128 0x1051307c0 | th_max = 896 | th_width = 32\n",
+ "ggml_metal_init: loaded kernel_flash_attn_ext_q8_0_h256 0x105130e40 | th_max = 896 | th_width = 32\n",
+ "ggml_metal_init: loaded kernel_flash_attn_ext_vec_f16_h128 0x105131500 | th_max = 1024 | th_width = 32\n",
+ "ggml_metal_init: skipping kernel_flash_attn_ext_vec_bf16_h128 (not supported)\n",
+ "ggml_metal_init: loaded kernel_flash_attn_ext_vec_q4_0_h128 0x105131ba0 | th_max = 1024 | th_width = 32\n",
+ "ggml_metal_init: loaded kernel_flash_attn_ext_vec_q4_1_h128 0x1048a4fb0 | th_max = 1024 | th_width = 32\n",
+ "ggml_metal_init: loaded kernel_flash_attn_ext_vec_q5_0_h128 0x1048a5bf0 | th_max = 832 | th_width = 32\n",
+ "ggml_metal_init: loaded kernel_flash_attn_ext_vec_q5_1_h128 0x104f95d60 | th_max = 832 | th_width = 32\n",
+ "ggml_metal_init: loaded kernel_flash_attn_ext_vec_q8_0_h128 0x104f37380 | th_max = 1024 | th_width = 32\n",
+ "ggml_metal_init: loaded kernel_flash_attn_ext_vec_f16_h256 0x1022d4890 | th_max = 1024 | th_width = 32\n",
+ "ggml_metal_init: skipping kernel_flash_attn_ext_vec_bf16_h256 (not supported)\n",
+ "ggml_metal_init: loaded kernel_flash_attn_ext_vec_q4_0_h256 0x1048a6210 | th_max = 1024 | th_width = 32\n",
+ "ggml_metal_init: loaded kernel_flash_attn_ext_vec_q4_1_h256 0x1027dc250 | th_max = 896 | th_width = 32\n",
+ "ggml_metal_init: loaded kernel_flash_attn_ext_vec_q5_0_h256 0x104fb9080 | th_max = 704 | th_width = 32\n",
+ "ggml_metal_init: loaded kernel_flash_attn_ext_vec_q5_1_h256 0x104fbafb0 | th_max = 704 | th_width = 32\n",
+ "ggml_metal_init: loaded kernel_flash_attn_ext_vec_q8_0_h256 0x104fbb9d0 | th_max = 896 | th_width = 32\n",
+ "ggml_metal_init: loaded kernel_cpy_f32_f32 0x104fba720 | th_max = 1024 | th_width = 32\n",
+ "ggml_metal_init: loaded kernel_cpy_f32_f16 0x104fbcd90 | th_max = 1024 | th_width = 32\n",
+ "ggml_metal_init: skipping kernel_cpy_f32_bf16 (not supported)\n",
+ "ggml_metal_init: loaded kernel_cpy_f16_f32 0x1048a7470 | th_max = 1024 | th_width = 32\n",
+ "ggml_metal_init: loaded kernel_cpy_f16_f16 0x1051324e0 | th_max = 1024 | th_width = 32\n",
+ "ggml_metal_init: skipping kernel_cpy_bf16_f32 (not supported)\n",
+ "ggml_metal_init: skipping kernel_cpy_bf16_bf16 (not supported)\n",
+ "ggml_metal_init: loaded kernel_cpy_f32_q8_0 0x1048a7e60 | th_max = 1024 | th_width = 32\n",
+ "ggml_metal_init: loaded kernel_cpy_f32_q4_0 0x1027dd5a0 | th_max = 1024 | th_width = 32\n",
+ "ggml_metal_init: loaded kernel_cpy_f32_q4_1 0x1048a80c0 | th_max = 1024 | th_width = 32\n",
+ "ggml_metal_init: loaded kernel_cpy_f32_q5_0 0x1048a8cb0 | th_max = 1024 | th_width = 32\n",
+ "ggml_metal_init: loaded kernel_cpy_f32_q5_1 0x105132740 | th_max = 1024 | th_width = 32\n",
+ "ggml_metal_init: loaded kernel_cpy_f32_iq4_nl 0x1048a9740 | th_max = 1024 | th_width = 32\n",
+ "ggml_metal_init: loaded kernel_concat 0x105133480 | th_max = 1024 | th_width = 32\n",
+ "ggml_metal_init: loaded kernel_sqr 0x1048aadb0 | th_max = 1024 | th_width = 32\n",
+ "ggml_metal_init: loaded kernel_sqrt 0x1048ab930 | th_max = 1024 | th_width = 32\n",
+ "ggml_metal_init: loaded kernel_sin 0x1051346e0 | th_max = 1024 | th_width = 32\n",
+ "ggml_metal_init: loaded kernel_cos 0x104fbe4f0 | th_max = 1024 | th_width = 32\n",
+ "ggml_metal_init: loaded kernel_sum_rows 0x105135010 | th_max = 1024 | th_width = 32\n",
+ "ggml_metal_init: loaded kernel_pool_2d_avg_f32 0x1027dd800 | th_max = 1024 | th_width = 32\n",
+ "ggml_metal_init: loaded kernel_pool_2d_max_f32 0x1048ab4d0 | th_max = 1024 | th_width = 32\n",
+ "llama_kv_cache_init: Metal KV buffer size = 64.00 MiB\n",
+ "llama_new_context_with_model: KV self size = 64.00 MiB, K (f16): 32.00 MiB, V (f16): 32.00 MiB\n",
+ "llama_new_context_with_model: CPU output buffer size = 0.49 MiB\n",
+ "llama_new_context_with_model: Metal compute buffer size = 64.62 MiB\n",
+ "llama_new_context_with_model: CPU compute buffer size = 2.25 MiB\n",
+ "llama_new_context_with_model: graph nodes = 1030\n",
+ "llama_new_context_with_model: graph splits = 2\n",
+ "AVX = 0 | AVX_VNNI = 0 | AVX2 = 0 | AVX512 = 0 | AVX512_VBMI = 0 | AVX512_VNNI = 0 | AVX512_BF16 = 0 | AMX_INT8 = 0 | FMA = 0 | NEON = 1 | SVE = 0 | ARM_FMA = 1 | F16C = 0 | FP16_VA = 1 | RISCV_VECT = 0 | WASM_SIMD = 0 | SSE3 = 0 | SSSE3 = 0 | VSX = 0 | MATMUL_INT8 = 0 | LLAMAFILE = 1 | \n",
+ "Model metadata: {'general.quantization_version': '2', 'tokenizer.chat_template': \"{% set loop_messages = messages %}{% for message in loop_messages %}{% set content = '<|start_header_id|>' + message['role'] + '<|end_header_id|>\\n\\n'+ message['content'] | trim + '<|eot_id|>' %}{% if loop.index0 == 0 %}{% set content = bos_token + content %}{% endif %}{{ content }}{% endfor %}{% if add_generation_prompt %}{{ '<|start_header_id|>assistant<|end_header_id|>\\n\\n' }}{% endif %}\", 'tokenizer.ggml.eos_token_id': '128009', 'tokenizer.ggml.bos_token_id': '128000', 'tokenizer.ggml.pre': 'llama-bpe', 'tokenizer.ggml.model': 'gpt2', 'llama.vocab_size': '128256', 'llama.attention.head_count_kv': '8', 'llama.context_length': '8192', 'llama.attention.head_count': '32', 'general.file_type': '15', 'llama.feed_forward_length': '14336', 'llama.rope.dimension_count': '128', 'llama.rope.freq_base': '500000.000000', 'llama.embedding_length': '4096', 'general.architecture': 'llama', 'llama.attention.layer_norm_rms_epsilon': '0.000010', 'general.name': 'Llama-3-8B-optimal-merged-stage2', 'llama.block_count': '32'}\n",
+ "Available chat formats from metadata: chat_template.default\n"
+ ]
+ }
+ ],
+ "source": [
+ "from llama_cpp import Llama\n",
+ "\n",
+ "llm = Llama(\n",
+ " model_path=\"../models/Llama-3-ELYZA-JP-8B-q4_k_m.gguf\",\n",
+ " chat_format=\"llama-3\",\n",
+ " # n_ctx=1024,\n",
+ " n_batch=128,\n",
+ " n_gpu_layers=-1,\n",
+ ")"
+ ]
+ },
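+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Note: with `n_ctx` commented out above, llama-cpp-python falls back to its 512-token default context window (it shows up as `_n_ctx: 512` in the `vars(llm)` dump near the end of this notebook, and the summarization prompt below already uses ~494 tokens). The next cell is a minimal sketch of setting it explicitly; the `llm_long` name and the value 4096 are illustrative choices, with `llama.context_length` = 8192 in the model metadata as the ceiling."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Sketch: reload with an explicit context window so inputs longer than a\n",
+ "# synopsis still fit. n_ctx=4096 is an assumed example value (model max: 8192).\n",
+ "llm_long = Llama(\n",
+ "    model_path=\"../models/Llama-3-ELYZA-JP-8B-q4_k_m.gguf\",\n",
+ "    chat_format=\"llama-3\",\n",
+ "    n_ctx=4096,\n",
+ "    n_batch=128,\n",
+ "    n_gpu_layers=-1,\n",
+ ")"
+ ]
+ },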
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "Llama.generate: 58 prefix-match hit, remaining 271 prompt tokens to eval\n",
+ "llama_perf_context_print: load time = 7094.75 ms\n",
+ "llama_perf_context_print: prompt eval time = 0.00 ms / 271 tokens ( 0.00 ms per token, inf tokens per second)\n",
+ "llama_perf_context_print: eval time = 0.00 ms / 165 runs ( 0.00 ms per token, inf tokens per second)\n",
+ "llama_perf_context_print: total time = 25745.42 ms / 436 tokens\n"
+ ]
+ }
+ ],
+ "source": [
+ "response = llm.create_chat_completion(\n",
+ " messages=[\n",
+ " {\n",
+ " \"role\": \"system\",\n",
+ " \"content\": \"あなたは誠実で優秀な日本人のアシスタントです。特に指示が無い場合は、常に日本語で回答してください。\",\n",
+ " },\n",
+ " {\n",
+ " \"role\": \"user\",\n",
+ " \"content\": \"以下の文章を要約してください。\\\n",
+ " # 文章\\\n",
+ " クリアするまで脱出不可能、ゲームオーバーは本当の“死”を意味する──。謎の次世代MMO『ソードアート・オンライン(SAO)』の“真実”を知らずログインした約一万人のユーザーと共に、その過酷なデスバトルは幕を開けた。\\\n",
+ " SAOに参加した一人である主人公・キリトは、いち早くこのMMOの“真実”を受け入れる。そして、ゲームの舞台となる巨大浮遊城『アインクラッド』で、パーティを組まないソロプレイヤーとして頭角をあらわしていった。\\\n",
+ " クリア条件である最上階層到達を目指し、熾烈な冒険(クエスト)を単独で続けるキリトだったが、レイピアの名手・女流剣士アスナの強引な誘いによって彼女とコンビを組むことになってしまう。その出会いは、キリトに運命とも呼べる契機をもたらし……。果たして、キリトはこのゲームから抜け出すことができるのか。\",\n",
+ " },\n",
+ " ],\n",
+ " max_tokens=1024,\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 9,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "以下は文章の要約です。\n",
+ "\n",
+ "謎のMMO『ソードアート・オンライン(SAO)』に約1万人のユーザーがログインしたが、ゲームの真実を知らずに脱出不可能なデスバトルが始まった。主人公のキリトは早くに真実を受け入れ、巨大浮遊城『アインクラッド』でソロプレイヤーとして活躍する。クリア条件の最上階層到達を目指すキリトは、女流剣士アスナとコンビを組むことになり、運命の出会いを果たす。果たしてキリトはこのゲームから抜け出すことができるのか。\n"
+ ]
+ }
+ ],
+ "source": [
+ "print(response[\"choices\"][0][\"message\"][\"content\"])"
+ ]
+ },
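+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Note: `create_chat_completion` also accepts `stream=True`, in which case it returns an iterator of chunks rather than a single response object. A minimal sketch with a short placeholder prompt is below; the first chunk's `delta` may carry only a `role` key, hence the `.get`."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Sketch: print tokens as they are generated instead of waiting for the\n",
+ "# whole completion to finish.\n",
+ "stream = llm.create_chat_completion(\n",
+ "    messages=[\n",
+ "        {\"role\": \"user\", \"content\": \"自己紹介してください。\"},\n",
+ "    ],\n",
+ "    max_tokens=256,\n",
+ "    stream=True,\n",
+ ")\n",
+ "for chunk in stream:\n",
+ "    delta = chunk[\"choices\"][0][\"delta\"]\n",
+ "    print(delta.get(\"content\", \"\"), end=\"\", flush=True)"
+ ]
+ },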
+ {
+ "cell_type": "code",
+ "execution_count": 12,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "{'verbose': True,\n",
+ " '_stack': <contextlib.ExitStack at 0x105b60ad0>,\n",
+ " 'numa': 0,\n",
+ " 'model_path': '../models/Llama-3-ELYZA-JP-8B-q4_k_m.gguf',\n",
+ " 'model_params': <llama_cpp.llama_cpp.llama_model_params at 0x115e5ab50>,\n",
+ " '_rpc_servers': None,\n",
+ " 'tensor_split': None,\n",
+ " '_c_tensor_split': None,\n",
+ " 'kv_overrides': None,\n",
+ " 'n_batch': 128,\n",
+ " 'n_threads': 4,\n",
+ " 'n_threads_batch': 8,\n",
+ " '_seed': 1894574933,\n",
+ " 'context_params': <llama_cpp.llama_cpp.llama_context_params at 0x115e59dd0>,\n",
+ " 'last_n_tokens_size': 64,\n",
+ " 'cache': None,\n",
+ " 'lora_base': None,\n",
+ " 'lora_scale': 1.0,\n",
+ " 'lora_path': None,\n",
+ " 'spm_infill': False,\n",
+ " '_model': <llama_cpp._internals.LlamaModel at 0x105ad6690>,\n",
+ " 'tokenizer_': <llama_cpp.llama_tokenizer.LlamaTokenizer at 0x115e0e450>,\n",
+ " '_ctx': <llama_cpp._internals.LlamaContext at 0x105b5d1d0>,\n",
+ " '_batch': <llama_cpp._internals.LlamaBatch at 0x10475e9d0>,\n",
+ " '_lora_adapter': None,\n",
+ " 'chat_format': 'llama-3',\n",
+ " 'chat_handler': None,\n",
+ " '_chat_handlers': {'chat_template.default': <function llama_cpp.llama_chat_format.chat_formatter_to_chat_completion_handler.<locals>.chat_completion_handler(*, llama: 'llama.Llama', messages: 'List[llama_types.ChatCompletionRequestMessage]', functions: 'Optional[List[llama_types.ChatCompletionFunction]]' = None, function_call: 'Optional[llama_types.ChatCompletionRequestFunctionCall]' = None, tools: 'Optional[List[llama_types.ChatCompletionTool]]' = None, tool_choice: 'Optional[llama_types.ChatCompletionToolChoiceOption]' = None, temperature: 'float' = 0.2, top_p: 'float' = 0.95, top_k: 'int' = 40, min_p: 'float' = 0.05, typical_p: 'float' = 1.0, stream: 'bool' = False, stop: 'Optional[Union[str, List[str]]]' = [], seed: 'Optional[int]' = None, response_format: 'Optional[llama_types.ChatCompletionRequestResponseFormat]' = None, max_tokens: 'Optional[int]' = None, presence_penalty: 'float' = 0.0, frequency_penalty: 'float' = 0.0, repeat_penalty: 'float' = 1.1, tfs_z: 'float' = 1.0, mirostat_mode: 'int' = 0, mirostat_tau: 'float' = 5.0, mirostat_eta: 'float' = 0.1, model: 'Optional[str]' = None, logits_processor: 'Optional[llama.LogitsProcessorList]' = None, grammar: 'Optional[llama.LlamaGrammar]' = None, logit_bias: 'Optional[Dict[str, float]]' = None, logprobs: 'Optional[bool]' = None, top_logprobs: 'Optional[int]' = None, **kwargs) -> 'Union[llama_types.CreateChatCompletionResponse, Iterator[llama_types.CreateChatCompletionStreamResponse]]'>},\n",
+ " 'draft_model': None,\n",
+ " '_n_vocab': 128256,\n",
+ " '_n_ctx': 512,\n",
+ " '_token_nl': 128,\n",
+ " '_token_eos': 128009,\n",
+ " '_candidates': <llama_cpp._internals.LlamaTokenDataArray at 0x115ec1450>,\n",
+ " 'n_tokens': 494,\n",
+ " 'input_ids': array([128000, 128006, 9125, 128007, 271, 30591, 112568, 15682,\n",
+ " 124097, 103350, 16556, 104622, 106241, 26854, 102433, 107707,\n",
+ " 39880, 57207, 105335, 52414, 38641, 1811, 66378, 20230,\n",
+ " 64467, 20379, 29295, 43568, 16995, 126513, 5486, 40053,\n",
+ " 20230, 102433, 102158, 16556, 113925, 39926, 72315, 1811,\n",
+ " 128009, 128006, 882, 128007, 271, 88852, 16144, 83125,\n",
+ " 30512, 31634, 103664, 39926, 72315, 1811, 310, 674,\n",
+ " 112053, 310, 116381, 104612, 54926, 103296, 110645, 20834,\n",
+ " 16937, 88367, 5486, 114567, 90962, 112164, 11972, 15682,\n",
+ " 117475, 16144, 2118, 102625, 863, 30512, 115552, 54926,\n",
+ " 17424, 1811, 105037, 236, 16144, 33671, 101083, 31640,\n",
+ " 8195, 46, 44620, 102741, 65575, 39880, 84477, 9458,\n",
+ " 110191, 118372, 10110, 7934, 46, 7705, 36761, 16144,\n",
+ " 2118, 89151, 103350, 863, 30512, 53283, 121140, 77750,\n",
+ " 76171, 56051, 103664, 15120, 32307, 107707, 108152, 38248,\n",
+ " 114, 11972, 19732, 55999, 20230, 106116, 103188, 100845,\n",
+ " 115, 26854, 68408, 22398, 66953, 127764, 15682, 106633,\n",
+ " 125523, 107674, 1811, 310, 16998, 46, 20230, 110284,\n",
+ " 56051, 122485, 103195, 122768, 35417, 9458, 62903, 37823,\n",
+ " 20251, 15682, 116898, 43514, 103856, 47884, 51330, 8195,\n",
+ " 46, 16144, 2118, 89151, 103350, 863, 114475, 76622,\n",
+ " 17701, 104028, 124845, 5486, 114567, 16144, 107875, 55038,\n",
+ " 117282, 109098, 27384, 111179, 109739, 60174, 44620, 39880,\n",
+ " 76171, 107059, 105404, 36761, 16556, 5486, 80805, 117675,\n",
+ " 30512, 103214, 17129, 100604, 102741, 42634, 57326, 108748,\n",
+ " 104930, 11972, 103306, 103892, 64936, 30512, 30591, 33503,\n",
+ " 78183, 109768, 100472, 1811, 310, 116381, 104612, 77195,\n",
+ " 103195, 32335, 17905, 106090, 114050, 28037, 104067, 30512,\n",
+ " 30832, 64467, 15024, 5486, 102448, 122, 111101, 26854,\n",
+ " 112798, 117126, 10110, 29220, 76739, 71634, 7705, 30512,\n",
+ " 110904, 106063, 16556, 106307, 105784, 62903, 37823, 20251,\n",
+ " 103351, 29295, 5486, 108748, 70563, 39880, 16144, 13372,\n",
+ " 46034, 9458, 58850, 89753, 119063, 101559, 39880, 22398,\n",
+ " 96452, 16144, 104195, 73686, 26854, 45918, 246, 16995,\n",
+ " 113468, 109453, 19732, 109713, 91482, 30512, 103214, 104004,\n",
+ " 100909, 117084, 114732, 109807, 20834, 38093, 16995, 15682,\n",
+ " 5486, 62903, 37823, 20251, 20230, 103768, 51609, 107173,\n",
+ " 105324, 103854, 30369, 120273, 101513, 30512, 32977, 28713,\n",
+ " 124949, 127891, 28873, 28713, 39926, 5486, 62903, 37823,\n",
+ " 20251, 15682, 51330, 114567, 55031, 113487, 76622, 121406,\n",
+ " 105908, 108608, 104865, 1811, 128009, 128006, 78191, 128007,\n",
+ " 271, 88852, 15682, 83125, 16144, 31634, 103664, 38641,\n",
+ " 3490, 105037, 236, 16144, 8195, 46, 44620, 102741,\n",
+ " 65575, 39880, 84477, 9458, 110191, 118372, 10110, 7934,\n",
+ " 46, 7705, 36761, 20230, 103664, 16, 32307, 107707,\n",
+ " 108152, 38248, 114, 11972, 29295, 77750, 76171, 56051,\n",
+ " 29295, 5486, 114567, 16144, 89151, 103350, 30512, 53283,\n",
+ " 121140, 20230, 110645, 20834, 16937, 88367, 26854, 68408,\n",
+ " 22398, 66953, 127764, 29295, 27704, 117864, 1811, 122768,\n",
+ " 35417, 16144, 62903, 37823, 20251, 15682, 103856, 47884,\n",
+ " 20230, 89151, 103350, 114475, 76622, 125639, 5486, 109098,\n",
+ " 27384, 111179, 109739, 60174, 44620, 39880, 76171, 107059,\n",
+ " 105404, 36761, 16556, 102741, 42634, 57326, 108748, 104930,\n",
+ " 11972, 103306, 76706, 109526, 235, 54926, 1811, 29220,\n",
+ " 104612, 77195, 16144, 32335, 17905, 106090, 114050, 28037,\n",
+ " 104067, 30512, 30832, 64467, 17663, 62903, 37823, 20251,\n",
+ " 15682, 5486, 58850, 89753, 119063, 101559, 39880, 22398,\n",
+ " 96452, 19732, 109713, 91482, 30512, 103214, 104004, 100909,\n",
+ " 115717, 5486, 103768, 51609, 16144, 20834, 38093, 16995,\n",
+ " 30512, 28873, 28713, 17663, 1811, 28873, 28713, 39926,\n",
+ " 62903, 37823, 20251, 15682, 51330, 114567, 55031, 113487,\n",
+ " 76622, 121406, 105908, 108608, 104865, 1811, 127173, 104028,\n",
+ " 108044, 108323, 19732, 97518, 89046, 47000, 107441, 108086,\n",
+ " 5486, 105469, 103424, 102212, 23530, 15682, 17039, 1],\n",
+ " dtype=int32),\n",
+ " 'scores': array([[0., 0., 0., ..., 0., 0., 0.],\n",
+ " [0., 0., 0., ..., 0., 0., 0.],\n",
+ " [0., 0., 0., ..., 0., 0., 0.],\n",
+ " ...,\n",
+ " [0., 0., 0., ..., 0., 0., 0.],\n",
+ " [0., 0., 0., ..., 0., 0., 0.],\n",
+ " [0., 0., 0., ..., 0., 0., 0.]], dtype=float32),\n",
+ " '_mirostat_mu': c_float(10.0),\n",
+ " 'metadata': {'general.quantization_version': '2',\n",
+ " 'tokenizer.chat_template': \"{% set loop_messages = messages %}{% for message in loop_messages %}{% set content = '<|start_header_id|>' + message['role'] + '<|end_header_id|>\\n\\n'+ message['content'] | trim + '<|eot_id|>' %}{% if loop.index0 == 0 %}{% set content = bos_token + content %}{% endif %}{{ content }}{% endfor %}{% if add_generation_prompt %}{{ '<|start_header_id|>assistant<|end_header_id|>\\n\\n' }}{% endif %}\",\n",
+ " 'tokenizer.ggml.eos_token_id': '128009',\n",
+ " 'tokenizer.ggml.bos_token_id': '128000',\n",
+ " 'tokenizer.ggml.pre': 'llama-bpe',\n",
+ " 'tokenizer.ggml.model': 'gpt2',\n",
+ " 'llama.vocab_size': '128256',\n",
+ " 'llama.attention.head_count_kv': '8',\n",
+ " 'llama.context_length': '8192',\n",
+ " 'llama.attention.head_count': '32',\n",
+ " 'general.file_type': '15',\n",
+ " 'llama.feed_forward_length': '14336',\n",
+ " 'llama.rope.dimension_count': '128',\n",
+ " 'llama.rope.freq_base': '500000.000000',\n",
+ " 'llama.embedding_length': '4096',\n",
+ " 'general.architecture': 'llama',\n",
+ " 'llama.attention.layer_norm_rms_epsilon': '0.000010',\n",
+ " 'general.name': 'Llama-3-8B-optimal-merged-stage2',\n",
+ " 'llama.block_count': '32'},\n",
+ " '_sampler': <llama_cpp._internals.LlamaSampler at 0x1031ee450>}"
+ ]
+ },
+ "execution_count": 12,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "vars(llm)"
+ ]
+ },
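+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Note: the dump above shows `n_tokens: 494` against `_n_ctx: 512`, so the summarization prompt nearly exhausted the default context window. A rough sketch for checking that up front with the model's own tokenizer (`Llama.tokenize` takes UTF-8 bytes; `Llama.n_ctx()` returns the configured window):"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Sketch: count a candidate prompt's tokens before sending it.\n",
+ "text = \"クリアするまで脱出不可能\"  # any candidate input\n",
+ "tokens = llm.tokenize(text.encode(\"utf-8\"), add_bos=False)\n",
+ "print(f\"{len(tokens)} tokens / n_ctx = {llm.n_ctx()}\")"
+ ]
+ },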
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": ".venv",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.11.9"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+ }
practice/practice.py ADDED
@@ -0,0 +1 @@
+ print("Hello World!")