Add files using upload-large-folder tool
This view is limited to 50 files because it contains too many changes; the raw diff has the full list.
- .gitattributes +2 -0
- README.md +129 -0
- checkpoint-36/config.json +35 -0
- checkpoint-36/generation_config.json +8 -0
- checkpoint-36/global_step36/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt +3 -0
- checkpoint-36/global_step36/bf16_zero_pp_rank_10_mp_rank_00_optim_states.pt +3 -0
- checkpoint-36/global_step36/bf16_zero_pp_rank_11_mp_rank_00_optim_states.pt +3 -0
- checkpoint-36/global_step36/bf16_zero_pp_rank_12_mp_rank_00_optim_states.pt +3 -0
- checkpoint-36/global_step36/bf16_zero_pp_rank_13_mp_rank_00_optim_states.pt +3 -0
- checkpoint-36/global_step36/bf16_zero_pp_rank_14_mp_rank_00_optim_states.pt +3 -0
- checkpoint-36/global_step36/bf16_zero_pp_rank_15_mp_rank_00_optim_states.pt +3 -0
- checkpoint-36/global_step36/bf16_zero_pp_rank_16_mp_rank_00_optim_states.pt +3 -0
- checkpoint-36/global_step36/bf16_zero_pp_rank_17_mp_rank_00_optim_states.pt +3 -0
- checkpoint-36/global_step36/bf16_zero_pp_rank_18_mp_rank_00_optim_states.pt +3 -0
- checkpoint-36/global_step36/bf16_zero_pp_rank_19_mp_rank_00_optim_states.pt +3 -0
- checkpoint-36/global_step36/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt +3 -0
- checkpoint-36/global_step36/bf16_zero_pp_rank_20_mp_rank_00_optim_states.pt +3 -0
- checkpoint-36/global_step36/bf16_zero_pp_rank_21_mp_rank_00_optim_states.pt +3 -0
- checkpoint-36/global_step36/bf16_zero_pp_rank_22_mp_rank_00_optim_states.pt +3 -0
- checkpoint-36/global_step36/bf16_zero_pp_rank_23_mp_rank_00_optim_states.pt +3 -0
- checkpoint-36/global_step36/bf16_zero_pp_rank_24_mp_rank_00_optim_states.pt +3 -0
- checkpoint-36/global_step36/bf16_zero_pp_rank_25_mp_rank_00_optim_states.pt +3 -0
- checkpoint-36/global_step36/bf16_zero_pp_rank_26_mp_rank_00_optim_states.pt +3 -0
- checkpoint-36/global_step36/bf16_zero_pp_rank_27_mp_rank_00_optim_states.pt +3 -0
- checkpoint-36/global_step36/bf16_zero_pp_rank_28_mp_rank_00_optim_states.pt +3 -0
- checkpoint-36/global_step36/bf16_zero_pp_rank_29_mp_rank_00_optim_states.pt +3 -0
- checkpoint-36/global_step36/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt +3 -0
- checkpoint-36/global_step36/bf16_zero_pp_rank_30_mp_rank_00_optim_states.pt +3 -0
- checkpoint-36/global_step36/bf16_zero_pp_rank_31_mp_rank_00_optim_states.pt +3 -0
- checkpoint-36/global_step36/bf16_zero_pp_rank_3_mp_rank_00_optim_states.pt +3 -0
- checkpoint-36/global_step36/bf16_zero_pp_rank_4_mp_rank_00_optim_states.pt +3 -0
- checkpoint-36/global_step36/bf16_zero_pp_rank_5_mp_rank_00_optim_states.pt +3 -0
- checkpoint-36/global_step36/bf16_zero_pp_rank_6_mp_rank_00_optim_states.pt +3 -0
- checkpoint-36/global_step36/bf16_zero_pp_rank_7_mp_rank_00_optim_states.pt +3 -0
- checkpoint-36/global_step36/bf16_zero_pp_rank_8_mp_rank_00_optim_states.pt +3 -0
- checkpoint-36/global_step36/bf16_zero_pp_rank_9_mp_rank_00_optim_states.pt +3 -0
- checkpoint-36/global_step36/zero_pp_rank_0_mp_rank_00_model_states.pt +3 -0
- checkpoint-36/global_step36/zero_pp_rank_10_mp_rank_00_model_states.pt +3 -0
- checkpoint-36/global_step36/zero_pp_rank_11_mp_rank_00_model_states.pt +3 -0
- checkpoint-36/global_step36/zero_pp_rank_12_mp_rank_00_model_states.pt +3 -0
- checkpoint-36/global_step36/zero_pp_rank_13_mp_rank_00_model_states.pt +3 -0
- checkpoint-36/global_step36/zero_pp_rank_14_mp_rank_00_model_states.pt +3 -0
- checkpoint-36/global_step36/zero_pp_rank_15_mp_rank_00_model_states.pt +3 -0
- checkpoint-36/global_step36/zero_pp_rank_16_mp_rank_00_model_states.pt +3 -0
- checkpoint-36/global_step36/zero_pp_rank_17_mp_rank_00_model_states.pt +3 -0
- checkpoint-36/global_step36/zero_pp_rank_18_mp_rank_00_model_states.pt +3 -0
- checkpoint-36/global_step36/zero_pp_rank_19_mp_rank_00_model_states.pt +3 -0
- checkpoint-36/global_step36/zero_pp_rank_1_mp_rank_00_model_states.pt +3 -0
- checkpoint-36/global_step36/zero_pp_rank_20_mp_rank_00_model_states.pt +3 -0
- checkpoint-36/global_step36/zero_pp_rank_21_mp_rank_00_model_states.pt +3 -0
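The commit title says these shards were pushed with the upload-large-folder tool; a minimal sketch of the equivalent `huggingface_hub` call (the repo id and local path below are placeholders, not values taken from this commit):

```python
# Resumable, multi-worker upload of a large checkpoint folder to the Hub.
# repo_id and folder_path are placeholders.
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login` / HF_TOKEN
api.upload_large_folder(
    repo_id="your-org/meditron_CHUV_2",
    repo_type="model",
    folder_path="/path/to/meditron_CHUV_2",
)
```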
.gitattributes
CHANGED
@@ -33,3 +33,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-36/tokenizer.json filter=lfs diff=lfs merge=lfs -text
README.md
ADDED
@@ -0,0 +1,129 @@
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->

[<img src="https://raw.githubusercontent.com/axolotl-ai-cloud/axolotl/main/image/axolotl-badge-web.png" alt="Built with Axolotl" width="200" height="32"/>](https://github.com/axolotl-ai-cloud/axolotl)
<details><summary>See axolotl config</summary>

axolotl version: `0.9.2`
```yaml
base_model: /capstor/scratch/cscs/bbernath/models/meditron-70B
chat_template: llama3
bfloat16: true
output_dir: /capstor/store/cscs/swissai/a06/meditron/models/meditron_CHUV_2 #/capstor/scratch/cscs/bbernath/models/meditron_CHUV
dataset_prepared_path: /capstor/scratch/cscs/bbernath/dataset/
# - path: /capstor/store/cscs/swissai/a06/meditron/datasets/masked/special_mixture/instruction_tuning_mixture.jsonl
#   type: chat_template
#   ds_type: json
#   split: train
#   field_messages: conversations
#   message_field_role: from
#   message_field_content: value
#pretraining_dataset:
#  - path: json
#    data_files:
#      - /capstor/store/cscs/swissai/a06/meditron/datasets/pretrain/pubmed/pubmed_3B.jsonl
#      - /capstor/store/cscs/swissai/a06/meditron/datasets/pretrain/fineweb/fineweb_400M_anglais.jsonl
#    type: pretrain
datasets:
  - path: /capstor/store/cscs/swissai/a06/meditron/datasets/masked/gemini/moove_gemini_2.jsonl
    type: chat_template
    ds_type: json
    split: train
    field_messages: conversations
    message_field_role: from
    message_field_content: value

shuffle_merged_datasets: true
dataset_processes: 128
# max_steps: 1500
flash_attention: true
sequence_len: 8192
gradient_accumulation_steps: 1
micro_batch_size: 1
train_on_inputs: false
group_by_length: false
pad_to_sequence_len: true
sample_packing: true
optimizer: adamw_torch
optim_args:
  fused: true
cosine_min_lr_ratio: 0.1
learning_rate: 1.0e-5
warmup_ratio: 0
weight_decay: 0.05
gradient_checkpointing: true
gradient_checkpointing_kwargs:
  use_reentrant: false
load_in_4bit: false
load_in_8bit: false
num_epochs: 1
saves_per_epoch: 1
# evals_per_epoch: 1
eval_set_size: 0.0
eval_table_size: null
lr_scheduler: cosine
max_grad_norm: 1.0
resume_from_checkpoint: null
special_tokens:
  pad_token: <|end_of_text|>
tf32: false
tokenizer_type: AutoTokenizer
type: LlamaForCausalLM
flash_attn_rms_norm: true
flash_attn_fuse_qkv: false
early_stopping_patience: 0
wandb_entity: alexs-team
wandb_name: meditron-CHUV-llama-gemini
wandb_project: Meditron DDX
wandb_watch: gradients
xformers_attention: null
logging_steps: 1
deepspeed: /capstor/users/cscs/bbernath/meditron/axolotl_config/deepspeed_new.json

```

</details><br>

# capstor/store/cscs/swissai/a06/meditron/models/meditron_CHUV_2

This model was trained from scratch on the /capstor/store/cscs/swissai/a06/meditron/datasets/masked/gemini/moove_gemini_2.jsonl dataset.

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 1e-05
- train_batch_size: 1
- eval_batch_size: 1
- seed: 42
- distributed_type: multi-GPU
- num_devices: 32
- total_train_batch_size: 32
- total_eval_batch_size: 32
- optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=fused=True
- lr_scheduler_type: cosine
- num_epochs: 1.0

### Training results

### Framework versions

- Transformers 4.51.3
- Pytorch 2.7.0a0+79aa17489c.nv25.04
- Datasets 3.6.0
- Tokenizers 0.21.1
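The auto-generated card stops short of a usage example. A minimal inference sketch, assuming the merged bf16 weights and tokenizer are published alongside this card (the repo id below is a placeholder, not something stated in the card):

```python
# Minimal inference sketch; repo_id is a placeholder.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "your-org/meditron_CHUV_2"  # placeholder

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    torch_dtype=torch.bfloat16,  # config.json declares bfloat16 weights
    device_map="auto",
)

# chat_template: llama3 in the axolotl config, so the tokenizer's chat
# template builds the prompt.
messages = [{"role": "user", "content": "List common causes of acute chest pain."}]
inputs = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)

# eos_token_id 128009 matches generation_config.json below.
out = model.generate(inputs, max_new_tokens=256, eos_token_id=128009)
print(tokenizer.decode(out[0][inputs.shape[-1]:], skip_special_tokens=True))
```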
checkpoint-36/config.json
ADDED
@@ -0,0 +1,35 @@
{
  "architectures": [
    "LlamaForCausalLM"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "bos_token_id": 128000,
  "eos_token_id": 128009,
  "head_dim": 128,
  "hidden_act": "silu",
  "hidden_size": 8192,
  "initializer_range": 0.02,
  "intermediate_size": 28672,
  "max_position_embeddings": 131072,
  "mlp_bias": false,
  "model_type": "llama",
  "num_attention_heads": 64,
  "num_hidden_layers": 80,
  "num_key_value_heads": 8,
  "pretraining_tp": 1,
  "rms_norm_eps": 1e-05,
  "rope_scaling": {
    "factor": 8.0,
    "high_freq_factor": 4.0,
    "low_freq_factor": 1.0,
    "original_max_position_embeddings": 8192,
    "rope_type": "llama3"
  },
  "rope_theta": 500000.0,
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.51.3",
  "use_cache": false,
  "vocab_size": 128256
}
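For orientation, these dimensions describe a Llama-3.1-style 70B decoder (80 layers, GQA with 8 KV heads, llama3 RoPE scaling to 131k positions). A back-of-the-envelope parameter count from the config values, ignoring the small RMSNorm weights:

```python
# Rough parameter count from config.json above (not an official figure).
hidden, inter, layers = 8192, 28672, 80
heads, kv_heads, head_dim = 64, 8, 128
vocab = 128256

attn = hidden * heads * head_dim           # q_proj
attn += 2 * hidden * kv_heads * head_dim   # k_proj + v_proj (GQA: 8 KV heads)
attn += heads * head_dim * hidden          # o_proj
mlp = 3 * hidden * inter                   # gate, up, down projections
embeddings = 2 * vocab * hidden            # tie_word_embeddings is false

total = layers * (attn + mlp) + embeddings
print(f"{total / 1e9:.1f}B parameters")    # ~70.6B
```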
checkpoint-36/generation_config.json
ADDED
@@ -0,0 +1,8 @@
{
  "_from_model_config": true,
  "bos_token_id": 128000,
  "do_sample": true,
  "eos_token_id": 128009,
  "transformers_version": "4.51.3",
  "use_cache": false
}
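generation_config.json only fixes the special-token ids and turns sampling on; transformers picks it up automatically when the checkpoint is loaded. A minimal sketch, assuming a local clone of the repo:

```python
# Read the checkpoint's generation defaults from a local clone.
from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained("./checkpoint-36")
print(gen_cfg.do_sample, gen_cfg.eos_token_id)  # True 128009
```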
checkpoint-36/global_step36/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ec46a32fca2ccca258a58c77918b485df5958ff996e8ff55e6ffedc2d8ad32d7
size 26457647899

checkpoint-36/global_step36/bf16_zero_pp_rank_10_mp_rank_00_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5f7e306c55a37be3ff265007d2a97853a5b9ca3adb21075739e97109a3378898
size 26457647920

checkpoint-36/global_step36/bf16_zero_pp_rank_11_mp_rank_00_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:272647272bd8b8019349059f094cbf8ac7a1cfcbff012c46231d6ade50e4cfda
size 26457647920

checkpoint-36/global_step36/bf16_zero_pp_rank_12_mp_rank_00_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ddfb045c6f47ec71002bdba7866cf55dc4ef91073cc48cd5a0b08bc16d3ac7fa
size 26457647920

checkpoint-36/global_step36/bf16_zero_pp_rank_13_mp_rank_00_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:34c687d3d0d350f4bc861668a23c3817c0b3ef9413d850282e10128631d5464b
size 26457647920

checkpoint-36/global_step36/bf16_zero_pp_rank_14_mp_rank_00_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b32d9df14d2dd31d4a5d168c07f742b030df39483a44f927ce94fbf530526197
size 26457647920

checkpoint-36/global_step36/bf16_zero_pp_rank_15_mp_rank_00_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:fb11ff217245b1d98cdb1445ff82c4c7be32e01239045cd58da1e00f669d6dd9
size 26457647920

checkpoint-36/global_step36/bf16_zero_pp_rank_16_mp_rank_00_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:80f740bb1e2accb82162465cb19448f1e5417fb2119927c73987835c91d2dfc2
size 26457647920

checkpoint-36/global_step36/bf16_zero_pp_rank_17_mp_rank_00_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ac2b59a496ebb81ee002c0587d4daf65d4f55e19a3146d20582d25d8aa55d056
size 26457647920

checkpoint-36/global_step36/bf16_zero_pp_rank_18_mp_rank_00_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c6b8402f886925efd507e5e0d9987af72dfaf32bee2b7963f73e40b87304b501
size 26457647920

checkpoint-36/global_step36/bf16_zero_pp_rank_19_mp_rank_00_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f79cfd0a7c4ef9d11df917bb469720d1e180968e00bfa3faf555b1049a0ecbad
size 26457647920

checkpoint-36/global_step36/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b7e085faaebb62a087060b7924ffa83af25db5ae089ffe694a948cda74460c26
size 26457647899

checkpoint-36/global_step36/bf16_zero_pp_rank_20_mp_rank_00_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0372aaf9f6307b0c1e380896f3951a8f5ef2770fcf186f718169e3df45359fe3
size 26457647920

checkpoint-36/global_step36/bf16_zero_pp_rank_21_mp_rank_00_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:00b8501529dc65482688229fef223654aac3aeea91776f97ab13e689caf37d26
size 26457647920

checkpoint-36/global_step36/bf16_zero_pp_rank_22_mp_rank_00_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:2c43100c0d1913b303de7b95ef59342febdedc37cd0cca025057c585b9cf79da
size 26457647920

checkpoint-36/global_step36/bf16_zero_pp_rank_23_mp_rank_00_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:eba8650ad6f42fdc6e5da81f3d6c622461a5bfa53eeb8403ddc329ba99670d11
size 26457647920

checkpoint-36/global_step36/bf16_zero_pp_rank_24_mp_rank_00_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6ef4839738273602ef80ce8d6c7a40f565710ac3f3c932bca40342c7e1de3e6a
size 26457647920

checkpoint-36/global_step36/bf16_zero_pp_rank_25_mp_rank_00_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:d98c0024b9e2d394ef1377e9d019ac513340a92d5ba75f6dde23009c9e8e96e6
size 26457647920

checkpoint-36/global_step36/bf16_zero_pp_rank_26_mp_rank_00_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e74376045e97446cfbc7f230dd816665a2c0782134d7d23af53d48dccb3d499f
size 26457647920

checkpoint-36/global_step36/bf16_zero_pp_rank_27_mp_rank_00_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:547e6b042d39ce10ef098e78d564e9fba98838aa4a9495e2a9fd9b5d5d58da9b
size 26457647920

checkpoint-36/global_step36/bf16_zero_pp_rank_28_mp_rank_00_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6bed7372bb94cbd7135223cfade9262bd6d4a8e86e0543bd0d866ce284c5dae6
size 26457647920

checkpoint-36/global_step36/bf16_zero_pp_rank_29_mp_rank_00_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:dae2304c180ceafedf9b3b191f3b47cb2514fee6494297078323dd464acc1019
size 26457647920

checkpoint-36/global_step36/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5c2b43824c957f5f04e29334de25d2b178a4a0dff9898b35b59f89db15eb724f
size 26457647899

checkpoint-36/global_step36/bf16_zero_pp_rank_30_mp_rank_00_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6a699b30cd9cc49febc227a4e0a4030d8220d2d080bbd85a1e22930f52569953
size 26457647920

checkpoint-36/global_step36/bf16_zero_pp_rank_31_mp_rank_00_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a27b488b026a73055423e0b64fa9ad0cc0ecc6e595be5859dc989fcf61b3d569
size 26457647920

checkpoint-36/global_step36/bf16_zero_pp_rank_3_mp_rank_00_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a59a831087f027bcdfc52845f24aeaa78e38f6998f5814c3ccc56b9069a6f8d6
size 26457647899

checkpoint-36/global_step36/bf16_zero_pp_rank_4_mp_rank_00_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:cd13cabbd2da62ecb6332c731a4ea5ad9f6a1feb8cbdd1bcd151401e76afb8f4
size 26457647899

checkpoint-36/global_step36/bf16_zero_pp_rank_5_mp_rank_00_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:616345c7d42dd0f59f23e087a52a93254e204c90766d7fb6871da189b4a5fd77
size 26457647899

checkpoint-36/global_step36/bf16_zero_pp_rank_6_mp_rank_00_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7706350bf83875a771435f09acb630e513b6d81a41784c50ea94355ef17a6dbb
size 26457647899

checkpoint-36/global_step36/bf16_zero_pp_rank_7_mp_rank_00_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ba1229d305cf455d34ce383a62a4bab8b0fae14112f928a255f06e4d75e92216
size 26457647899

checkpoint-36/global_step36/bf16_zero_pp_rank_8_mp_rank_00_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7428dbf83ec6da45d14084396ba86876bd744f371ae4f934bad0a223d6dd98cd
size 26457647899

checkpoint-36/global_step36/bf16_zero_pp_rank_9_mp_rank_00_optim_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:656453dd6ce5cc70e41bf3bf60831020dd5c119089602bbc327ad5d05cd91d34
size 26457647899

checkpoint-36/global_step36/zero_pp_rank_0_mp_rank_00_model_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:abd940b6b308999dcfcaaca453cdc08a9a52824b5c7b88b65fbde74d3a1912c1
size 368825

checkpoint-36/global_step36/zero_pp_rank_10_mp_rank_00_model_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e23ea85980c79b32f8997c3421b2d118737c9c0a6197cdef784787838c9630d1
size 369553

checkpoint-36/global_step36/zero_pp_rank_11_mp_rank_00_model_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:eed7911335b5495fc4e0f169703198a0d9dc6da5167fad47f50dcd9c505ba1d7
size 369553

checkpoint-36/global_step36/zero_pp_rank_12_mp_rank_00_model_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:de6a1407278c2b12baeb90323792c6bce769d8eccf5aeab40c8170cd4309b356
size 369553

checkpoint-36/global_step36/zero_pp_rank_13_mp_rank_00_model_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:26cf25f4c22b5122e09f7bc3ff5baec78326bab27309321aa1c28b9ee8019f86
size 369553

checkpoint-36/global_step36/zero_pp_rank_14_mp_rank_00_model_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:94ad84e9d8348269e2c0e2407a7069581ceeeba43014ff6c6112ebf5e7ce4ecb
size 369553

checkpoint-36/global_step36/zero_pp_rank_15_mp_rank_00_model_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:92ba4113926b265237917ce9064d49d752094f9b9b916fcd8c809cb6f69d6e48
size 369553

checkpoint-36/global_step36/zero_pp_rank_16_mp_rank_00_model_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:93a9e23a2ca3df99a6b4e75b3783c41f75b7326e9bdba6c3401c7fad87a34330
size 369553

checkpoint-36/global_step36/zero_pp_rank_17_mp_rank_00_model_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0728bc6ac583a0416232f29e25afcd9c4fe7f27737c8b5a5b924fbed5bd5c050
size 369553

checkpoint-36/global_step36/zero_pp_rank_18_mp_rank_00_model_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6f287a2d496f3dd3be247ba51a11a500f13bab9c73984b44347e45ccd4e537b8
size 369553

checkpoint-36/global_step36/zero_pp_rank_19_mp_rank_00_model_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5ccaa58c54a4508913bb2c77c9371c4d7c7bc20facb07284b48928c0e7a80c0d
size 369553

checkpoint-36/global_step36/zero_pp_rank_1_mp_rank_00_model_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:1d42baaa3944d5833bce5dfba236a6111e6a7b4638e530f2e47d64ef3ec7653a
size 368825

checkpoint-36/global_step36/zero_pp_rank_20_mp_rank_00_model_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9c99866e7ccba32451a91fd21a524ec6b7a09f24a2ac2d7cbc1d1cb48217516e
size 369553

checkpoint-36/global_step36/zero_pp_rank_21_mp_rank_00_model_states.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0d5314246d66823695d948897d960c375b27748b8753ec8da133d0e42238ff6a
size 369553
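Everything under checkpoint-36/global_step36/ is a DeepSpeed ZeRO checkpoint: one bf16 optimizer-state shard (~26.5 GB) and one small model-states file per data-parallel rank, for the 32 ranks listed in the hyperparameters. To turn the shards back into a single state dict offline, DeepSpeed's zero_to_fp32 utilities can be used; a minimal sketch, assuming a local clone of the repo, an installed deepspeed, and enough host RAM (roughly 4 bytes per parameter, so on the order of 280 GB for a 70B model):

```python
# Consolidate the ZeRO-partitioned shards under checkpoint-36/global_step36
# into a single fp32 state dict and save it to disk.
import torch
from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint

state_dict = get_fp32_state_dict_from_zero_checkpoint(
    "checkpoint-36", tag="global_step36"
)
torch.save(state_dict, "pytorch_model_fp32.bin")
```

The Trainer normally also writes a standalone zero_to_fp32.py script into the checkpoint folder for the same purpose, but it is not among the 50 files shown in this view.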