Kobi-01 committed · verified
Commit e5445c3 · 1 Parent(s): 715bab1

Kobi-01/distilled-tamil-qa-v2

Files changed (4)
  1. README.md +105 -0
  2. adapter_config.json +35 -0
  3. adapter_model.safetensors +3 -0
  4. training_args.bin +3 -0
README.md ADDED
@@ -0,0 +1,105 @@
+ ---
+ library_name: peft
+ license: mit
+ base_model: xlm-roberta-base
+ tags:
+ - generated_from_trainer
+ metrics:
+ - f1
+ model-index:
+ - name: tamil-qa-distilled
+   results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # tamil-qa-distilled
+
+ This model is a fine-tuned version of [xlm-roberta-base](https://huggingface.co/xlm-roberta-base) on an unknown dataset.
+ It achieves the following results on the evaluation set:
+ - Loss: 2.4959
+ - Exact: 22.3153
+ - F1: 37.4456
+ - Total: 5848
+ - HasAns Exact: 22.3153
+ - HasAns F1: 37.4456
+ - HasAns Total: 5848
+ - Best Exact: 22.3153
+ - Best Exact Thresh: 0.0
+ - Best F1: 37.4456
+ - Best F1 Thresh: 0.0
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 0.0002
+ - train_batch_size: 4
+ - eval_batch_size: 8
+ - seed: 42
+ - gradient_accumulation_steps: 2
+ - total_train_batch_size: 8
+ - optimizer: paged_adamw_8bit (betas=(0.9, 0.999), epsilon=1e-08, no additional optimizer arguments)
+ - lr_scheduler_type: linear
+ - num_epochs: 1
+
+ ### Training results
+
+ | Training Loss | Epoch | Step | Validation Loss | Exact | F1 | Total | HasAns Exact | HasAns F1 | HasAns Total | Best Exact | Best Exact Thresh | Best F1 | Best F1 Thresh |
+ |:-------------:|:------:|:----:|:---------------:|:-------:|:-------:|:-----:|:------------:|:---------:|:------------:|:----------:|:-----------------:|:-------:|:--------------:|
+ | 5.4467 | 0.0302 | 250 | 4.6445 | 0.4275 | 6.2970 | 5848 | 0.4275 | 6.2970 | 5848 | 0.4275 | 0.0 | 6.2970 | 0.0 |
+ | 4.3568 | 0.0604 | 500 | 4.0214 | 4.8906 | 12.5359 | 5848 | 4.8906 | 12.5359 | 5848 | 4.8906 | 0.0 | 12.5359 | 0.0 |
+ | 4.0656 | 0.0905 | 750 | 3.7904 | 12.5342 | 24.2632 | 5848 | 12.5342 | 24.2632 | 5848 | 12.5342 | 0.0 | 24.2632 | 0.0 |
+ | 3.8889 | 0.1207 | 1000 | 3.3827 | 15.5438 | 27.3077 | 5848 | 15.5438 | 27.3077 | 5848 | 15.5438 | 0.0 | 27.3077 | 0.0 |
+ | 3.4289 | 0.1509 | 1250 | 3.1672 | 13.8338 | 24.8643 | 5848 | 13.8338 | 24.8643 | 5848 | 13.8338 | 0.0 | 24.8643 | 0.0 |
+ | 3.5355 | 0.1811 | 1500 | 3.0776 | 16.9802 | 30.2997 | 5848 | 16.9802 | 30.2997 | 5848 | 16.9802 | 0.0 | 30.2997 | 0.0 |
+ | 3.1612 | 0.2112 | 1750 | 2.9511 | 19.4425 | 33.6814 | 5848 | 19.4425 | 33.6814 | 5848 | 19.4425 | 0.0 | 33.6814 | 0.0 |
+ | 3.2976 | 0.2414 | 2000 | 3.0930 | 20.0752 | 34.6007 | 5848 | 20.0752 | 34.6007 | 5848 | 20.0752 | 0.0 | 34.6007 | 0.0 |
+ | 3.2589 | 0.2716 | 2250 | 2.8639 | 18.9808 | 32.7633 | 5848 | 18.9808 | 32.7633 | 5848 | 18.9808 | 0.0 | 32.7633 | 0.0 |
+ | 2.9672 | 0.3018 | 2500 | 2.9398 | 20.4001 | 35.6524 | 5848 | 20.4001 | 35.6524 | 5848 | 20.4001 | 0.0 | 35.6524 | 0.0 |
+ | 3.7545 | 0.3319 | 2750 | 2.9454 | 19.2886 | 33.4999 | 5848 | 19.2886 | 33.4999 | 5848 | 19.2886 | 0.0 | 33.4999 | 0.0 |
+ | 2.8654 | 0.3621 | 3000 | 2.7416 | 18.7244 | 31.0609 | 5848 | 18.7244 | 31.0609 | 5848 | 18.7244 | 0.0 | 31.0609 | 0.0 |
+ | 3.0861 | 0.3923 | 3250 | 2.7491 | 21.0499 | 35.7036 | 5848 | 21.0499 | 35.7036 | 5848 | 21.0499 | 0.0 | 35.7036 | 0.0 |
+ | 2.7695 | 0.4225 | 3500 | 2.8284 | 21.5287 | 36.4741 | 5848 | 21.5287 | 36.4741 | 5848 | 21.5287 | 0.0 | 36.4741 | 0.0 |
+ | 3.0057 | 0.4526 | 3750 | 2.7043 | 20.1436 | 34.2750 | 5848 | 20.1436 | 34.2750 | 5848 | 20.1436 | 0.0 | 34.2750 | 0.0 |
+ | 2.9906 | 0.4828 | 4000 | 2.6032 | 20.1607 | 33.6974 | 5848 | 20.1607 | 33.6974 | 5848 | 20.1607 | 0.0 | 33.6974 | 0.0 |
+ | 2.7957 | 0.5130 | 4250 | 2.7008 | 20.9815 | 36.0642 | 5848 | 20.9815 | 36.0642 | 5848 | 20.9815 | 0.0 | 36.0642 | 0.0 |
+ | 2.8885 | 0.5432 | 4500 | 2.6539 | 21.5116 | 37.0821 | 5848 | 21.5116 | 37.0821 | 5848 | 21.5116 | 0.0 | 37.0821 | 0.0 |
+ | 2.7088 | 0.5733 | 4750 | 2.6216 | 22.1956 | 37.0885 | 5848 | 22.1956 | 37.0885 | 5848 | 22.1956 | 0.0 | 37.0885 | 0.0 |
+ | 2.5385 | 0.6035 | 5000 | 2.7195 | 22.1614 | 37.4103 | 5848 | 22.1614 | 37.4103 | 5848 | 22.1614 | 0.0 | 37.4103 | 0.0 |
+ | 2.7104 | 0.6337 | 5250 | 2.5939 | 21.0841 | 36.2480 | 5848 | 21.0841 | 36.2480 | 5848 | 21.0841 | 0.0 | 36.2480 | 0.0 |
+ | 2.9083 | 0.6639 | 5500 | 2.5586 | 21.8707 | 37.0920 | 5848 | 21.8707 | 37.0920 | 5848 | 21.8707 | 0.0 | 37.0920 | 0.0 |
+ | 2.7255 | 0.6940 | 5750 | 2.5521 | 21.3406 | 36.6577 | 5848 | 21.3406 | 36.6577 | 5848 | 21.3406 | 0.0 | 36.6577 | 0.0 |
+ | 2.6606 | 0.7242 | 6000 | 2.6845 | 22.7770 | 38.6065 | 5848 | 22.7770 | 38.6065 | 5848 | 22.7770 | 0.0 | 38.6065 | 0.0 |
+ | 2.7477 | 0.7544 | 6250 | 2.5134 | 21.3919 | 36.2724 | 5848 | 21.3919 | 36.2724 | 5848 | 21.3919 | 0.0 | 36.2724 | 0.0 |
+ | 2.6319 | 0.7846 | 6500 | 2.5086 | 21.2722 | 35.6261 | 5848 | 21.2722 | 35.6261 | 5848 | 21.2722 | 0.0 | 35.6261 | 0.0 |
+ | 2.687 | 0.8147 | 6750 | 2.6354 | 23.4097 | 39.3830 | 5848 | 23.4097 | 39.3830 | 5848 | 23.4097 | 0.0 | 39.3830 | 0.0 |
+ | 2.6415 | 0.8449 | 7000 | 2.4868 | 21.7339 | 36.0235 | 5848 | 21.7339 | 36.0235 | 5848 | 21.7339 | 0.0 | 36.0235 | 0.0 |
+ | 2.6671 | 0.8751 | 7250 | 2.5276 | 22.5718 | 38.0853 | 5848 | 22.5718 | 38.0853 | 5848 | 22.5718 | 0.0 | 38.0853 | 0.0 |
+ | 2.489 | 0.9053 | 7500 | 2.4679 | 21.7852 | 36.3686 | 5848 | 21.7852 | 36.3686 | 5848 | 21.7852 | 0.0 | 36.3686 | 0.0 |
+ | 2.6783 | 0.9354 | 7750 | 2.4992 | 22.1101 | 37.3694 | 5848 | 22.1101 | 37.3694 | 5848 | 22.1101 | 0.0 | 37.3694 | 0.0 |
+ | 2.75 | 0.9656 | 8000 | 2.5187 | 22.4692 | 37.8004 | 5848 | 22.4692 | 37.8004 | 5848 | 22.4692 | 0.0 | 37.8004 | 0.0 |
+ | 2.7079 | 0.9958 | 8250 | 2.4959 | 22.3153 | 37.4456 | 5848 | 22.3153 | 37.4456 | 5848 | 22.3153 | 0.0 | 37.4456 | 0.0 |
+
+
+ ### Framework versions
+
+ - PEFT 0.14.0
+ - Transformers 4.48.2
+ - PyTorch 2.5.1+cu124
+ - Datasets 3.2.0
+ - Tokenizers 0.21.0
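
Taken together, the hyperparameters above and the LoRA settings in adapter_config.json below describe the full training setup. What follows is only a minimal reconstruction sketch, not the author's actual script: the card does not name the training data, so `train_ds`/`eval_ds` are hypothetical placeholders, and `output_dir` is illustrative.

```python
# Hedged reconstruction of the setup implied by the model card and
# adapter_config.json; dataset objects are placeholders.
from peft import LoraConfig, get_peft_model
from transformers import (AutoModelForQuestionAnswering, AutoTokenizer,
                          Trainer, TrainingArguments)

base = "xlm-roberta-base"
tokenizer = AutoTokenizer.from_pretrained(base)
model = AutoModelForQuestionAnswering.from_pretrained(base)

# LoRA on the attention projections, with the fresh QA head (qa_outputs)
# trained in full -- mirrors adapter_config.json.
lora = LoraConfig(
    task_type="QUESTION_ANS",
    r=8,
    lora_alpha=32,
    lora_dropout=0.05,
    target_modules=["query", "key", "value"],
    modules_to_save=["qa_outputs"],
)
model = get_peft_model(model, lora)

# Mirrors the hyperparameter list above; batch 4 x 2 accumulation steps
# gives the effective train batch size of 8.
args = TrainingArguments(
    output_dir="tamil-qa-distilled",   # illustrative
    learning_rate=2e-4,
    per_device_train_batch_size=4,
    per_device_eval_batch_size=8,
    gradient_accumulation_steps=2,
    num_train_epochs=1,
    lr_scheduler_type="linear",
    optim="paged_adamw_8bit",          # requires bitsandbytes
    seed=42,
)

# trainer = Trainer(model=model, args=args, train_dataset=train_ds,
#                   eval_dataset=eval_ds, processing_class=tokenizer)
# trainer.train()
```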
adapter_config.json ADDED
@@ -0,0 +1,35 @@
+ {
+   "alpha_pattern": {},
+   "auto_mapping": null,
+   "base_model_name_or_path": "xlm-roberta-base",
+   "bias": "none",
+   "eva_config": null,
+   "exclude_modules": null,
+   "fan_in_fan_out": false,
+   "inference_mode": true,
+   "init_lora_weights": true,
+   "layer_replication": null,
+   "layers_pattern": null,
+   "layers_to_transform": null,
+   "loftq_config": {},
+   "lora_alpha": 32,
+   "lora_bias": false,
+   "lora_dropout": 0.05,
+   "megatron_config": null,
+   "megatron_core": "megatron.core",
+   "modules_to_save": [
+     "qa_outputs"
+   ],
+   "peft_type": "LORA",
+   "r": 8,
+   "rank_pattern": {},
+   "revision": null,
+   "target_modules": [
+     "query",
+     "value",
+     "key"
+   ],
+   "task_type": "QUESTION_ANS",
+   "use_dora": false,
+   "use_rslora": false
+ }
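
This LoRA config pairs with the base checkpoint named in `base_model_name_or_path`. A minimal inference sketch, assuming the adapter is published under this repo id (`Kobi-01/distilled-tamil-qa-v2`); the Tamil question and context are illustrative only:

```python
# Load the base QA model, apply the LoRA adapter, and extract an answer span.
import torch
from peft import PeftModel
from transformers import AutoModelForQuestionAnswering, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("xlm-roberta-base")
model = AutoModelForQuestionAnswering.from_pretrained("xlm-roberta-base")
model = PeftModel.from_pretrained(model, "Kobi-01/distilled-tamil-qa-v2")
model.eval()

question = "இந்தியாவின் தலைநகரம் எது?"  # "What is the capital of India?"
context = "இந்தியாவின் தலைநகரம் புது தில்லி ஆகும்."  # "The capital of India is New Delhi."
inputs = tokenizer(question, context, return_tensors="pt")

with torch.no_grad():
    out = model(**inputs)
start = out.start_logits.argmax()  # most likely span start
end = out.end_logits.argmax()      # most likely span end
print(tokenizer.decode(inputs["input_ids"][0][start : end + 1]))
```

Because `modules_to_save` includes `qa_outputs`, the adapter ships the trained QA head alongside the LoRA deltas on query/key/value, so nothing further needs training at load time.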
adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d9c6ba3a85a32e32221f2dca795d190d8d872c79a5225855e30f798a003dfa62
+ size 1782956
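
This is a Git LFS pointer rather than the weights themselves; the `oid` and `size` identify the actual blob. A short sketch of fetching the real file and checking it against the pointer, assuming `huggingface_hub` is installed:

```python
# Download adapter_model.safetensors and verify it against the LFS pointer.
import hashlib
from huggingface_hub import hf_hub_download

path = hf_hub_download("Kobi-01/distilled-tamil-qa-v2", "adapter_model.safetensors")
data = open(path, "rb").read()
print(hashlib.sha256(data).hexdigest())  # should equal the sha256 oid above
print(len(data))                         # should equal size 1782956
```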
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5f87101fd38e2eda11193a40cac4c0b424922fc592447cdbf67cf5eb75c49945
+ size 5304
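
training_args.bin is likewise an LFS pointer; the underlying file is the pickled `TrainingArguments` object that `Trainer` saves alongside a run. A sketch of inspecting it, with the usual caveat that unpickling executes arbitrary code and should only be done on files you trust:

```python
# Unpickle the saved TrainingArguments for inspection (trusted files only).
import torch
from huggingface_hub import hf_hub_download

path = hf_hub_download("Kobi-01/distilled-tamil-qa-v2", "training_args.bin")
args = torch.load(path, weights_only=False)
print(args.learning_rate, args.num_train_epochs, args.optim)
```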