mntss committed (verified)
Commit dc01158 · 1 Parent(s): 604ef4b

Upload folder using huggingface_hub
layers.0.mlp/cfg.json ADDED
@@ -0,0 +1 @@
+ {"activation": "topk", "expansion_factor": 32, "normalize_decoder": true, "num_latents": 131072, "k": 32, "multi_topk": false, "skip_connection": true, "transcode": true, "d_in": 2048}
layers.0.mlp/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bb9f023a3e7ca92dcda42f6e3d350b2ef7581615ee39e7033965a7fe9059d7f8
+ size 2164793776
layers.1.mlp/cfg.json ADDED
@@ -0,0 +1 @@
+ {"activation": "topk", "expansion_factor": 32, "normalize_decoder": true, "num_latents": 131072, "k": 32, "multi_topk": false, "skip_connection": true, "transcode": true, "d_in": 2048}
layers.1.mlp/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:41117014203efab6817ffe4c592c0ec95de13beb2f516dacf400a19cab6c7325
+ size 2164793776
layers.10.mlp/cfg.json ADDED
@@ -0,0 +1 @@
+ {"activation": "topk", "expansion_factor": 32, "normalize_decoder": true, "num_latents": 131072, "k": 32, "multi_topk": false, "skip_connection": true, "transcode": true, "d_in": 2048}
layers.10.mlp/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:903f0d44eb07ea4d91927a15dcb5a683d380e2e0b2357620aeb595a262a687de
+ size 2164793776
layers.11.mlp/cfg.json ADDED
@@ -0,0 +1 @@
+ {"activation": "topk", "expansion_factor": 32, "normalize_decoder": true, "num_latents": 131072, "k": 32, "multi_topk": false, "skip_connection": true, "transcode": true, "d_in": 2048}
layers.11.mlp/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:06df92f3e018a2247747f220a031f445185260fbcfeabcc2ae555093cd2e2d38
+ size 2164793776
layers.12.mlp/cfg.json ADDED
@@ -0,0 +1 @@
+ {"activation": "topk", "expansion_factor": 32, "normalize_decoder": true, "num_latents": 131072, "k": 32, "multi_topk": false, "skip_connection": true, "transcode": true, "d_in": 2048}
layers.12.mlp/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5957fde141aa5bbb0392b83ef8d0d976dd52944e973184db5813fdc2df6c20d3
+ size 2164793776
layers.13.mlp/cfg.json ADDED
@@ -0,0 +1 @@
+ {"activation": "topk", "expansion_factor": 32, "normalize_decoder": true, "num_latents": 131072, "k": 32, "multi_topk": false, "skip_connection": true, "transcode": true, "d_in": 2048}
layers.13.mlp/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:df030f18c9f73dee53227b9b828cbc3f789125d363e4137e040c117e669b2e79
+ size 2164793776
layers.14.mlp/cfg.json ADDED
@@ -0,0 +1 @@
+ {"activation": "topk", "expansion_factor": 32, "normalize_decoder": true, "num_latents": 131072, "k": 32, "multi_topk": false, "skip_connection": true, "transcode": true, "d_in": 2048}
layers.14.mlp/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:faeedba2d0abaf1b445028a3968b87bf3039ca6351d7532af6120aa5c0cecae9
+ size 2164793776
layers.15.mlp/cfg.json ADDED
@@ -0,0 +1 @@
+ {"activation": "topk", "expansion_factor": 32, "normalize_decoder": true, "num_latents": 131072, "k": 32, "multi_topk": false, "skip_connection": true, "transcode": true, "d_in": 2048}
layers.15.mlp/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b851018249b113bfc8cd3461a9203401deddfb4f33f199cd87a8114e9484bb96
+ size 2164793776
layers.2.mlp/cfg.json ADDED
@@ -0,0 +1 @@
+ {"activation": "topk", "expansion_factor": 32, "normalize_decoder": true, "num_latents": 131072, "k": 32, "multi_topk": false, "skip_connection": true, "transcode": true, "d_in": 2048}
layers.2.mlp/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1744bd51d287f0d01491df30689b5ca8e19cf74fabbdc442a3101abd235bd7bb
+ size 2164793776
layers.3.mlp/cfg.json ADDED
@@ -0,0 +1 @@
+ {"activation": "topk", "expansion_factor": 32, "normalize_decoder": true, "num_latents": 131072, "k": 32, "multi_topk": false, "skip_connection": true, "transcode": true, "d_in": 2048}
layers.3.mlp/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c94aa9b07a1d7277bafedba7a7f58d37accd2dc6644372525b2cffd01b1ac17c
+ size 2164793776
layers.4.mlp/cfg.json ADDED
@@ -0,0 +1 @@
+ {"activation": "topk", "expansion_factor": 32, "normalize_decoder": true, "num_latents": 131072, "k": 32, "multi_topk": false, "skip_connection": true, "transcode": true, "d_in": 2048}
layers.4.mlp/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2445afe851df75365f6214986a1d437fd337f0779981eae0503143671b35edf8
+ size 2164793776
layers.5.mlp/cfg.json ADDED
@@ -0,0 +1 @@
+ {"activation": "topk", "expansion_factor": 32, "normalize_decoder": true, "num_latents": 131072, "k": 32, "multi_topk": false, "skip_connection": true, "transcode": true, "d_in": 2048}
layers.5.mlp/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a062608124cd6eaf4d8769b9bbc18056c879fe477ba8eb49ae63284ba0f39750
+ size 2164793776
layers.6.mlp/cfg.json ADDED
@@ -0,0 +1 @@
+ {"activation": "topk", "expansion_factor": 32, "normalize_decoder": true, "num_latents": 131072, "k": 32, "multi_topk": false, "skip_connection": true, "transcode": true, "d_in": 2048}
layers.6.mlp/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:74524351edffcb239feb6ada22b6a4abaeae021bdc22492cfa5cccf13e239dd6
+ size 2164793776
layers.7.mlp/cfg.json ADDED
@@ -0,0 +1 @@
+ {"activation": "topk", "expansion_factor": 32, "normalize_decoder": true, "num_latents": 131072, "k": 32, "multi_topk": false, "skip_connection": true, "transcode": true, "d_in": 2048}
layers.7.mlp/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:052ada68482b5b885165028630a7e5ae29cd018e956828fa1ee42703143f6f9a
+ size 2164793776
layers.8.mlp/cfg.json ADDED
@@ -0,0 +1 @@
+ {"activation": "topk", "expansion_factor": 32, "normalize_decoder": true, "num_latents": 131072, "k": 32, "multi_topk": false, "skip_connection": true, "transcode": true, "d_in": 2048}
layers.8.mlp/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d83dc2604c2f7aeb951858f7bef47d82cb91b750cbdef782e361077267cf3354
+ size 2164793776
layers.9.mlp/cfg.json ADDED
@@ -0,0 +1 @@
+ {"activation": "topk", "expansion_factor": 32, "normalize_decoder": true, "num_latents": 131072, "k": 32, "multi_topk": false, "skip_connection": true, "transcode": true, "d_in": 2048}
layers.9.mlp/sae.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e8f49372a3d10faed5ca5356844d3c691c431e440a01daa8e900fbfbfc191683
+ size 2164793776
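
Each cfg.json above describes a per-layer top-k transcoder (131,072 latents, k=32, d_in=2048, skip connection enabled, transcode=true), and each sae.safetensors entry is a Git LFS pointer to the corresponding ~2.2 GB weight file. Below is a minimal sketch, not part of this commit, of how one might fetch and inspect a single layer using only the generic huggingface_hub and safetensors APIs; the repository id is a placeholder and must be replaced with the repository this commit belongs to.

```python
import json

from huggingface_hub import hf_hub_download
from safetensors.torch import load_file

REPO_ID = "user/repo"   # placeholder: substitute the actual repository id
LAYER = "layers.0.mlp"  # any of layers.0.mlp ... layers.15.mlp

# cfg.json is a small JSON file; sae.safetensors resolves through Git LFS
# to the ~2.2 GB weight file referenced by the pointer above.
cfg_path = hf_hub_download(repo_id=REPO_ID, filename=f"{LAYER}/cfg.json")
sae_path = hf_hub_download(repo_id=REPO_ID, filename=f"{LAYER}/sae.safetensors")

with open(cfg_path) as f:
    cfg = json.load(f)
print(cfg["activation"], cfg["k"], cfg["num_latents"], cfg["d_in"])

# Load the tensors on CPU and list their names, shapes, and dtypes.
state_dict = load_file(sae_path, device="cpu")
for name, tensor in state_dict.items():
    print(name, tuple(tensor.shape), tensor.dtype)
```

If these weights were trained with a library whose config schema matches the one shown (the fields resemble EleutherAI-style sparse coder configs), that library's own loading helpers may be preferable; the sketch above deliberately relies only on hf_hub_download and safetensors.torch.load_file.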