josh-oo committed
Commit 79a033c · verified · 1 Parent(s): 6b211a8

BAAI_bge-m3_contrastive_fold_0
Files changed (2):
  1. config.json (+2 -7)
  2. model.safetensors (+2 -2)
config.json CHANGED
@@ -3,12 +3,7 @@
   "architectures": [
     "AspectModelXLMRoberta"
   ],
-  "aspects": [
-    "participants",
-    "intervention",
-    "condition",
-    "outcome"
-  ],
+  "aspects": [],
   "attention_probs_dropout_prob": 0.1,
   "bos_token_id": 0,
   "classifier_dropout": null,
@@ -30,5 +25,5 @@
   "transformers_version": "4.48.3",
   "type_vocab_size": 1,
   "use_cache": true,
-  "vocab_size": 250006
+  "vocab_size": 250002
 }
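For reference, a minimal sketch of how the updated config from this commit could be inspected locally. The file path is assumed (e.g. after cloning the repo or downloading config.json), and the note about the vocabulary shrinking by four entries alongside the four removed aspect names is an observation about the diff, not something stated in the commit itself. The AspectModelXLMRoberta class is custom code and is not assumed to load with stock transformers here; only the raw JSON is read.

import json

# Path assumed: config.json downloaded or cloned from this repo at commit 79a033c.
with open("config.json") as f:
    config = json.load(f)

print(config["architectures"])  # ['AspectModelXLMRoberta']
print(config["aspects"])        # [] after this commit (previously four aspect names)
print(config["vocab_size"])     # 250002 after this commit (previously 250006)

# The vocab_size drops by 4 (250006 - 250002), the same count as the removed
# aspect entries, which suggests (but the commit does not state) that the
# aspect-specific tokens were dropped from the vocabulary.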
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f8d5975dc304b168c5b6b876c2ced9e2dbfc6061c863f31a43f42897782f09f1
-size 1135563122
+oid sha256:ec7aaced55ae5e888216a63c989b39207eae0d29001d6e3d2944cecc28f90245
+size 1135554922
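The safetensors weights are stored via Git LFS, so the diff only changes the pointer file: its oid is the SHA-256 of the actual weight file and size is its byte length. A minimal sketch (local file path assumed) for checking that a downloaded model.safetensors matches the new pointer recorded in this commit:

import hashlib
import os

# Values taken from the new LFS pointer in this commit.
EXPECTED_OID = "ec7aaced55ae5e888216a63c989b39207eae0d29001d6e3d2944cecc28f90245"
EXPECTED_SIZE = 1135554922

path = "model.safetensors"  # assumed local download location

# Cheap check first: the byte size recorded in the pointer.
assert os.path.getsize(path) == EXPECTED_SIZE, "size does not match the LFS pointer"

# Then the SHA-256 digest, streamed in 1 MiB chunks to avoid loading ~1.1 GB at once.
sha256 = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha256.update(chunk)

assert sha256.hexdigest() == EXPECTED_OID, "sha256 does not match the LFS pointer"
print("model.safetensors matches the LFS pointer at commit 79a033c")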