text-editing-coda / config.json
{
"_name_or_path": "/scratch/ba63/BERT_models/bert-base-arabertv02",
"architectures": [
"BertForTokenClassificationSingleLabel"
],
"attention_probs_dropout_prob": 0.1,
"classifier_dropout": null,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 768,
"id2label": {
"0": "D*",
"1": "DK*",
"2": "D*KI_[\u0630\u0627]",
"3": "D*KI_[\u0646]K",
"4": "I_[ ]K*",
"5": "I_[ \u0627]K*",
"6": "I_[\u0627]K*",
"7": "I_[\u0627]KI_[\u0627]K*",
"8": "I_[\u0627]KI_[\u0644]K*",
"9": "I_[ \u0627\u0644]K*",
"10": "I_[\u0627\u0644]K*",
"11": "I_[ \u0644]K*",
"12": "I_[\u0647]K*",
"13": "I_[\u064a]K*",
"14": "K*",
"15": "K*D",
"16": "K*DK",
"17": "KDK*",
"18": "K*DKKK",
"19": "K*I_[ ]K",
"20": "KI_[ ]K*",
"21": "K*I_[\u0627]",
"22": "K*I_[\u0627]K",
"23": "KI_[\u0627 ]K*",
"24": "KI_[\u0627]K*",
"25": "K*I_[\u0627]KI_[\u0644]K",
"26": "KI_[\u0627\u0644]K*",
"27": "K*I_[\u0644]K",
"28": "KI_[\u0644]K*",
"29": "K*I_[\u0646]K",
"30": "KI_[\u0646]K*",
"31": "K*I_[\u0647]",
"32": "K*I_[\u0647]K",
"33": "K*I_[\u0648]K",
"34": "K*I_[\u064a]K",
"35": "KI_[\u064a]K*",
"36": "KKI_[ ]K*",
"37": "KKI_[ \u0627]K*",
"38": "KKI_[\u0627]K*",
"39": "KKKR_[\u0627]K*",
"40": "KKR_[\u0627]K*",
"41": "K*R_[ ]",
"42": "K*R_[\u0622]K",
"43": "K*R_[\u0627]",
"44": "KR_[\u0627]K*",
"45": "K*R_[\u0629]",
"46": "K*R_[\u062b]",
"47": "K*R_[\u062b]K",
"48": "K*R_[\u062b]KK",
"49": "K*R_[\u0630]",
"50": "K*R_[\u0638]",
"51": "K*R_[\u0639]",
"52": "K*R_[\u0642]",
"53": "K*R_[\u0642]K",
"54": "K*R_[\u0647]",
"55": "K*R_[\u0649]",
"56": "K*R_[\u064a]",
"57": "MI_[\u0627]K*",
"58": "MK*",
"59": "MK*R_[\u0629]",
"60": "MR_[\u0627]K*",
"61": "MR_[\u062b]K*",
"62": "R_[\u0622]K*",
"63": "R_[\u0627]",
"64": "R_[\u0627]K*",
"65": "R_[\u0627]K*I_[\u0644]K",
"66": "R_[\u0627]K*R_[\u062b]",
"67": "R_[\u0629]",
"68": "R_[\u062a]I_[\u0634]K*",
"69": "R_[\u062a]K*",
"70": "R_[\u062a]R_[\u0639]K*",
"71": "R_[\u062b]",
"72": "R_[\u062b]K*",
"73": "R_[\u062b]R_[\u0629]",
"74": "R_[\u0630]",
"75": "R_[\u0630]K*",
"76": "R_[\u0638]K*",
"77": "R_[\u063a]K*",
"78": "R_[\u0642]",
"79": "R_[\u0642]K*",
"80": "R_[\u0643]K*",
"81": "R_[\u0647]",
"82": "R_[\u0649]",
"83": "R_[\u064a]"
},
"initializer_range": 0.02,
"intermediate_size": 3072,
"label2id": {
"D*": 0,
"D*KI_[\u0630\u0627]": 2,
"D*KI_[\u0646]K": 3,
"DK*": 1,
"I_[ ]K*": 4,
"I_[ \u0627]K*": 5,
"I_[ \u0627\u0644]K*": 9,
"I_[ \u0644]K*": 11,
"I_[\u0627]K*": 6,
"I_[\u0627]KI_[\u0627]K*": 7,
"I_[\u0627]KI_[\u0644]K*": 8,
"I_[\u0627\u0644]K*": 10,
"I_[\u0647]K*": 12,
"I_[\u064a]K*": 13,
"K*": 14,
"K*D": 15,
"K*DK": 16,
"K*DKKK": 18,
"K*I_[ ]K": 19,
"K*I_[\u0627]": 21,
"K*I_[\u0627]K": 22,
"K*I_[\u0627]KI_[\u0644]K": 25,
"K*I_[\u0644]K": 27,
"K*I_[\u0646]K": 29,
"K*I_[\u0647]": 31,
"K*I_[\u0647]K": 32,
"K*I_[\u0648]K": 33,
"K*I_[\u064a]K": 34,
"K*R_[ ]": 41,
"K*R_[\u0622]K": 42,
"K*R_[\u0627]": 43,
"K*R_[\u0629]": 45,
"K*R_[\u062b]": 46,
"K*R_[\u062b]K": 47,
"K*R_[\u062b]KK": 48,
"K*R_[\u0630]": 49,
"K*R_[\u0638]": 50,
"K*R_[\u0639]": 51,
"K*R_[\u0642]": 52,
"K*R_[\u0642]K": 53,
"K*R_[\u0647]": 54,
"K*R_[\u0649]": 55,
"K*R_[\u064a]": 56,
"KDK*": 17,
"KI_[ ]K*": 20,
"KI_[\u0627 ]K*": 23,
"KI_[\u0627]K*": 24,
"KI_[\u0627\u0644]K*": 26,
"KI_[\u0644]K*": 28,
"KI_[\u0646]K*": 30,
"KI_[\u064a]K*": 35,
"KKI_[ ]K*": 36,
"KKI_[ \u0627]K*": 37,
"KKI_[\u0627]K*": 38,
"KKKR_[\u0627]K*": 39,
"KKR_[\u0627]K*": 40,
"KR_[\u0627]K*": 44,
"MI_[\u0627]K*": 57,
"MK*": 58,
"MK*R_[\u0629]": 59,
"MR_[\u0627]K*": 60,
"MR_[\u062b]K*": 61,
"R_[\u0622]K*": 62,
"R_[\u0627]": 63,
"R_[\u0627]K*": 64,
"R_[\u0627]K*I_[\u0644]K": 65,
"R_[\u0627]K*R_[\u062b]": 66,
"R_[\u0629]": 67,
"R_[\u062a]I_[\u0634]K*": 68,
"R_[\u062a]K*": 69,
"R_[\u062a]R_[\u0639]K*": 70,
"R_[\u062b]": 71,
"R_[\u062b]K*": 72,
"R_[\u062b]R_[\u0629]": 73,
"R_[\u0630]": 74,
"R_[\u0630]K*": 75,
"R_[\u0638]K*": 76,
"R_[\u063a]K*": 77,
"R_[\u0642]": 78,
"R_[\u0642]K*": 79,
"R_[\u0643]K*": 80,
"R_[\u0647]": 81,
"R_[\u0649]": 82,
"R_[\u064a]": 83
},
"layer_norm_eps": 1e-12,
"max_position_embeddings": 512,
"model_type": "bert",
"num_attention_heads": 12,
"num_hidden_layers": 12,
"pad_token_id": 0,
"position_embedding_type": "absolute",
"torch_dtype": "float32",
"transformers_version": "4.30.0",
"type_vocab_size": 2,
"use_cache": true,
"vocab_size": 64000
}
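For reference, below is a minimal sketch of how this config might be used at inference time: load the checkpoint, run token classification, and map each predicted class index back to its edit-tag string via id2label. The repository path and example sentence are placeholders, and the stock BertForTokenClassification class is only a stand-in: the config declares a custom BertForTokenClassificationSingleLabel architecture that is not part of the transformers library itself, so faithful loading may require the model code that accompanies this checkpoint.

```python
# Minimal sketch (not the official inference code): load this config and map
# per-token predictions back to edit tags via id2label.
# Assumptions: MODEL_DIR is a placeholder for a local clone or Hub repo id, and
# BertForTokenClassification stands in for the custom
# BertForTokenClassificationSingleLabel head named in "architectures".
import torch
from transformers import AutoConfig, AutoTokenizer, BertForTokenClassification

MODEL_DIR = "path/to/text-editing-coda"  # placeholder path

config = AutoConfig.from_pretrained(MODEL_DIR)
print(config.model_type, config.num_labels)  # "bert", 84 edit-tag classes

tokenizer = AutoTokenizer.from_pretrained(MODEL_DIR)
model = BertForTokenClassification.from_pretrained(MODEL_DIR, config=config)
model.eval()

sentence = "..."  # placeholder: an Arabic input sentence
inputs = tokenizer(sentence, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits          # shape: (1, seq_len, 84)

pred_ids = logits.argmax(dim=-1)[0].tolist()
tokens = tokenizer.convert_ids_to_tokens(inputs["input_ids"][0])
for token, label_id in zip(tokens, pred_ids):
    # id2label keys are loaded as ints by transformers, so this lookup works
    print(token, config.id2label[label_id])
```

Turning the predicted tags back into corrected text requires the project's own tag-application logic, which is not shown here.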