Text Classification
Adapters
biology
naifenn committed on
Commit
bb7f618
·
verified ·
1 Parent(s): 8a2da4f

Upload folder using huggingface_hub

Browse files
Files changed (3) hide show
  1. .DS_Store +0 -0
  2. adapter_config.json +27 -18
  3. adapters.safetensors +2 -2
.DS_Store ADDED
Binary file (6.15 kB). View file
 
adapter_config.json CHANGED
@@ -1,38 +1,47 @@
1
  {
2
  "adapter_path": "jumbo_adapters",
3
- "batch_size": 2,
4
  "config": null,
5
- "data": "cleaned_data/",
6
  "fine_tune_type": "lora",
7
- "grad_checkpoint": false,
8
- "iters": 10000,
9
- "learning_rate": 2e-05,
10
  "lora_parameters": {
11
  "keys": [
12
  "mlp.gate_proj",
13
  "mlp.down_proj",
14
  "self_attn.q_proj",
15
  "mlp.up_proj",
16
- "self_attn.o_proj",
17
  "self_attn.v_proj",
18
  "self_attn.k_proj"
19
  ],
20
- "rank": 8,
21
- "alpha": 8,
22
- "dropout": 0.05,
23
  "scale": 16.0
24
  },
25
- "lr_schedule": null,
26
- "max_seq_length": 2048,
27
- "model": "ministral/Ministral-3b-instruct",
28
- "num_layers": 14,
 
 
 
 
 
 
 
 
 
29
  "resume_adapter_file": null,
30
- "save_every": 100,
31
- "seed": 0,
32
- "steps_per_eval": 200,
33
  "steps_per_report": 10,
34
  "test": true,
35
- "test_batches": 500,
 
36
  "train": true,
37
- "val_batches": 25
38
  }
 
1
  {
2
  "adapter_path": "jumbo_adapters",
3
+ "batch_size": 16,
4
  "config": null,
5
+ "data": "data/",
6
  "fine_tune_type": "lora",
7
+ "grad_checkpoint": true,
8
+ "iters": 3000,
9
+ "learning_rate": 5e-05,
10
  "lora_parameters": {
11
  "keys": [
12
  "mlp.gate_proj",
13
  "mlp.down_proj",
14
  "self_attn.q_proj",
15
  "mlp.up_proj",
 
16
  "self_attn.v_proj",
17
  "self_attn.k_proj"
18
  ],
19
+ "rank": 64,
20
+ "alpha": 64,
21
+ "dropout": 0.1,
22
  "scale": 16.0
23
  },
24
+ "lr_schedule": {
25
+ "name": "cosine_decay",
26
+ "warmup": 500,
27
+ "warmup_init": 1e-07,
28
+ "arguments": [
29
+ 1e-05,
30
+ 500,
31
+ 1e-07
32
+ ]
33
+ },
34
+ "max_seq_length": 512,
35
+ "model": "Qwen/Qwen2.5-3B",
36
+ "num_layers": 36,
37
  "resume_adapter_file": null,
38
+ "save_every": 50,
39
+ "seed": 24,
40
+ "steps_per_eval": 50,
41
  "steps_per_report": 10,
42
  "test": true,
43
+ "test_batches": 200,
44
+ "testfile": "/Users/dutingzhen/PycharmProjects/finetuning/data/test.jsonl",
45
  "train": true,
46
+ "val_batches": 50
47
  }
adapters.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:004a9e19d8d2ae1a2e8a0309eb34c33938e4488a964eaee886f71d9b049f8ee7
3
- size 36721232
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b692f731342b23ea987aa4b3a09eb989b220062b56e59bdf034bdbbd424eef90
3
+ size 441236095