Model save

Changed files:
- .gitattributes +1 -0
- README.md +60 -0
- added_tokens.json +24 -0
- config.json +28 -0
- generation_config.json +14 -0
- merges.txt +0 -0
- model-00001-of-00002.safetensors +3 -0
- model-00002-of-00002.safetensors +3 -0
- model.safetensors.index.json +441 -0
- special_tokens_map.json +31 -0
- tokenizer.json +3 -0
- tokenizer_config.json +209 -0
- trainer_log.jsonl +241 -0
- training_args.bin +3 -0
- vocab.json +0 -0

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text

README.md ADDED
@@ -0,0 +1,60 @@
---
library_name: transformers
license: other
base_model: Qwen/Qwen2.5-Coder-3B-Instruct
tags:
- llama-factory
- generated_from_trainer
model-index:
- name: ex52_qwen2.5_3b_101k_16kcw_3ep_cuda_amd_os_4090
  results: []
---

<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->

# ex52_qwen2.5_3b_101k_16kcw_3ep_cuda_amd_os_4090

This model is a fine-tuned version of [Qwen/Qwen2.5-Coder-3B-Instruct](https://huggingface.co/Qwen/Qwen2.5-Coder-3B-Instruct) on the None dataset.

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 2e-05
- train_batch_size: 4
- eval_batch_size: 8
- seed: 42
- distributed_type: multi-GPU
- num_devices: 4
- gradient_accumulation_steps: 8
- total_train_batch_size: 128
- total_eval_batch_size: 32
- optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
- lr_scheduler_type: cosine
- lr_scheduler_warmup_ratio: 0.1
- num_epochs: 3.0

### Training results



### Framework versions

- Transformers 4.51.3
- Pytorch 2.6.0+cu124
- Datasets 3.2.0
- Tokenizers 0.21.0
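
Note that the effective batch size in the card follows from train_batch_size × num_devices × gradient_accumulation_steps = 4 × 4 × 8 = 128. Below is a minimal loading sketch, not part of the generated card; the repo id is a placeholder and the chat usage assumes the Qwen2.5 chat template shipped with this checkpoint.

```python
# Minimal sketch: load the fine-tuned checkpoint and run one chat-formatted generation.
# "your-org/ex52_qwen2.5_3b_101k_16kcw_3ep_cuda_amd_os_4090" is a placeholder repo id.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "your-org/ex52_qwen2.5_3b_101k_16kcw_3ep_cuda_amd_os_4090"  # placeholder
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.bfloat16, device_map="auto")

messages = [{"role": "user", "content": "Write a CUDA kernel that adds two float vectors."}]
input_ids = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)
output = model.generate(input_ids, max_new_tokens=256)
print(tokenizer.decode(output[0][input_ids.shape[-1]:], skip_special_tokens=True))
```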

added_tokens.json ADDED
@@ -0,0 +1,24 @@
{
  "</tool_call>": 151658,
  "<tool_call>": 151657,
  "<|box_end|>": 151649,
  "<|box_start|>": 151648,
  "<|endoftext|>": 151643,
  "<|file_sep|>": 151664,
  "<|fim_middle|>": 151660,
  "<|fim_pad|>": 151662,
  "<|fim_prefix|>": 151659,
  "<|fim_suffix|>": 151661,
  "<|im_end|>": 151645,
  "<|im_start|>": 151644,
  "<|image_pad|>": 151655,
  "<|object_ref_end|>": 151647,
  "<|object_ref_start|>": 151646,
  "<|quad_end|>": 151651,
  "<|quad_start|>": 151650,
  "<|repo_name|>": 151663,
  "<|video_pad|>": 151656,
  "<|vision_end|>": 151653,
  "<|vision_pad|>": 151654,
  "<|vision_start|>": 151652
}
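
A quick sanity check, as a sketch (`./` is a placeholder for a local checkout of this repo), that the tokenizer resolves the added tokens to the ids listed above:

```python
# Sketch: verify a couple of the added-token ids against the mapping above.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./")  # placeholder: local checkout of this repo
print(tok.convert_tokens_to_ids("<|im_start|>"))  # expected 151644
print(tok.convert_tokens_to_ids("<tool_call>"))   # expected 151657
```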

config.json ADDED
@@ -0,0 +1,28 @@
{
  "architectures": [
    "Qwen2ForCausalLM"
  ],
  "attention_dropout": 0.0,
  "bos_token_id": 151643,
  "eos_token_id": 151645,
  "hidden_act": "silu",
  "hidden_size": 2048,
  "initializer_range": 0.02,
  "intermediate_size": 11008,
  "max_position_embeddings": 32768,
  "max_window_layers": 36,
  "model_type": "qwen2",
  "num_attention_heads": 16,
  "num_hidden_layers": 36,
  "num_key_value_heads": 2,
  "rms_norm_eps": 1e-06,
  "rope_scaling": null,
  "rope_theta": 1000000.0,
  "sliding_window": 32768,
  "tie_word_embeddings": true,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.51.3",
  "use_cache": false,
  "use_sliding_window": false,
  "vocab_size": 151936
}
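
The attention layout implied by these values: 2048 hidden dims split over 16 heads gives a head dimension of 128, and with only 2 key/value heads the model uses grouped-query attention where 8 query heads share each KV head. A small sketch using only numbers copied from the config above:

```python
# Sketch: attention geometry derived from config.json (values copied from above).
hidden_size = 2048
num_attention_heads = 16
num_key_value_heads = 2

head_dim = hidden_size // num_attention_heads                      # 128
queries_per_kv_head = num_attention_heads // num_key_value_heads   # 8 (grouped-query attention)
print(head_dim, queries_per_kv_head)
```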

generation_config.json ADDED
@@ -0,0 +1,14 @@
{
  "bos_token_id": 151643,
  "do_sample": true,
  "eos_token_id": [
    151645,
    151643
  ],
  "pad_token_id": 151643,
  "repetition_penalty": 1.05,
  "temperature": 0.7,
  "top_k": 20,
  "top_p": 0.8,
  "transformers_version": "4.51.3"
}
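
These values are the defaults that `model.generate()` picks up from the checkpoint. A sketch of reproducing or overriding them explicitly, using the parameter names of `transformers.GenerationConfig`:

```python
# Sketch: the same sampling settings expressed as an explicit GenerationConfig.
from transformers import GenerationConfig

gen_cfg = GenerationConfig(
    do_sample=True,
    temperature=0.7,
    top_p=0.8,
    top_k=20,
    repetition_penalty=1.05,
    eos_token_id=[151645, 151643],
    pad_token_id=151643,
)
# output = model.generate(input_ids, generation_config=gen_cfg, max_new_tokens=256)
```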

merges.txt ADDED
The diff for this file is too large to render. See raw diff.

model-00001-of-00002.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b49457d526a4625381f3dc5c009bba984efaf5f41dbb92815ea33b776b976ee8
size 4957560304

model-00002-of-00002.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:74e08badfe81191569a67ea1a6b5cd5f79e7395b981b264c871147cb1c4634c2
size 1214366696
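
Both safetensors entries above are Git LFS pointer files rather than the weights themselves; the real shards are fetched by `git lfs pull` or the Hub client. A sketch for checking a downloaded shard against the oid and size recorded in its pointer (paths assume a local checkout):

```python
# Sketch: verify a downloaded shard against the sha256/size recorded in its LFS pointer.
import hashlib
import os

def check_lfs_object(path, expected_sha256, expected_size):
    assert os.path.getsize(path) == expected_size, "size mismatch"
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    assert digest.hexdigest() == expected_sha256, "sha256 mismatch"

check_lfs_object(
    "model-00002-of-00002.safetensors",
    "74e08badfe81191569a67ea1a6b5cd5f79e7395b981b264c871147cb1c4634c2",
    1214366696,
)
```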

model.safetensors.index.json ADDED
@@ -0,0 +1,441 @@
{
  "metadata": {
    "total_size": 6171877376
  },
  "weight_map": {
    "model.embed_tokens.weight": "model-00001-of-00002.safetensors",
    "model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.0.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.0.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.1.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.1.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.1.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.10.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.10.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.10.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.11.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.11.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.11.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.11.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.12.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.12.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.12.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.12.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.12.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.13.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.13.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.13.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.13.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.13.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.14.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.14.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.14.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.14.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.14.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.14.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.14.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.14.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.15.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.15.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.15.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.15.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.15.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.15.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.15.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.15.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.15.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.15.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.15.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.15.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.16.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.16.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.16.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.16.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.16.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.16.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.16.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.16.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.16.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.16.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.16.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.16.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.17.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.17.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.17.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.17.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.17.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.17.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.17.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.17.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.17.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.17.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.17.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.17.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.18.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.18.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.18.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.18.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.18.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.18.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.18.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.18.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.18.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.18.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.18.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.18.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.19.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.19.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.19.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.19.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.19.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.19.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.19.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.19.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.19.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.19.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.19.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.19.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.2.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.2.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.2.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.20.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.20.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.20.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.20.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.20.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.20.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.20.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.20.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.20.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.20.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.20.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.20.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.21.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.21.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.21.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.21.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.21.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.21.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.21.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.21.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.21.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.21.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.21.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.21.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.22.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.22.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.22.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.22.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.22.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.22.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.22.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.22.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.22.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.22.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.22.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.22.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.23.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.23.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.23.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.23.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.23.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.23.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.23.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.23.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.23.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.23.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.23.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.23.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.24.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.24.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.24.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.24.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.24.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.24.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.24.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.24.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.24.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.24.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.24.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.24.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.25.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.25.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.25.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.25.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.25.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.25.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.25.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.25.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.25.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.25.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.25.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.25.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.26.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.26.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.26.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.26.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.26.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.26.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.26.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.26.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.26.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.26.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.26.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.26.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.27.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.27.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.27.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.27.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.27.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.27.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.27.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.27.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.27.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.27.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.27.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.27.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.28.input_layernorm.weight": "model-00002-of-00002.safetensors",
    "model.layers.28.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.28.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.28.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.28.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
    "model.layers.28.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.28.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.28.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.28.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.28.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.28.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.28.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.29.input_layernorm.weight": "model-00002-of-00002.safetensors",
    "model.layers.29.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.29.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.29.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.29.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
    "model.layers.29.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
    "model.layers.29.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.29.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.29.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
    "model.layers.29.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.29.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
    "model.layers.29.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.3.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.3.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.3.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.30.input_layernorm.weight": "model-00002-of-00002.safetensors",
    "model.layers.30.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.30.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.30.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.30.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
    "model.layers.30.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
    "model.layers.30.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.30.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.30.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
    "model.layers.30.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.30.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
    "model.layers.30.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.31.input_layernorm.weight": "model-00002-of-00002.safetensors",
    "model.layers.31.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.31.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.31.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.31.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
    "model.layers.31.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
    "model.layers.31.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.31.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.31.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
    "model.layers.31.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.31.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
    "model.layers.31.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.32.input_layernorm.weight": "model-00002-of-00002.safetensors",
    "model.layers.32.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.32.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.32.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.32.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
    "model.layers.32.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
    "model.layers.32.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.32.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.32.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
    "model.layers.32.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.32.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
    "model.layers.32.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.33.input_layernorm.weight": "model-00002-of-00002.safetensors",
    "model.layers.33.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.33.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.33.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.33.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
    "model.layers.33.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
    "model.layers.33.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.33.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.33.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
    "model.layers.33.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.33.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
    "model.layers.33.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.34.input_layernorm.weight": "model-00002-of-00002.safetensors",
    "model.layers.34.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.34.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.34.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.34.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
    "model.layers.34.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
    "model.layers.34.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.34.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.34.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
    "model.layers.34.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.34.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
    "model.layers.34.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.35.input_layernorm.weight": "model-00002-of-00002.safetensors",
    "model.layers.35.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.35.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.35.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.35.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
    "model.layers.35.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
    "model.layers.35.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.35.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.35.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
    "model.layers.35.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.35.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
    "model.layers.35.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
    "model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.4.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.4.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.4.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.5.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.5.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.5.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.6.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.6.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.6.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.7.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.7.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.7.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.8.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.8.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.8.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
    "model.layers.9.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.9.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
    "model.layers.9.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
    "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
    "model.norm.weight": "model-00002-of-00002.safetensors"
  }
}
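
The weight map records which of the two shards holds each tensor: the embedding and layers 0-27 (plus layer 28's attention projections) sit in shard 1, while layer 28's MLP/norms, layers 29-35, and the final norm sit in shard 2. `from_pretrained` consumes this index automatically, but it can also be used directly, for example to pull a single tensor without loading everything. A sketch, assuming a local checkout with the shards downloaded:

```python
# Sketch: use the index to locate and load one tensor from the right shard.
import json
from safetensors import safe_open

with open("model.safetensors.index.json") as f:
    index = json.load(f)

name = "model.norm.weight"
shard = index["weight_map"][name]  # "model-00002-of-00002.safetensors"
with safe_open(shard, framework="pt") as f:
    tensor = f.get_tensor(name)
print(name, tuple(tensor.shape))
```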

special_tokens_map.json ADDED
@@ -0,0 +1,31 @@
{
  "additional_special_tokens": [
    "<|im_start|>",
    "<|im_end|>",
    "<|object_ref_start|>",
    "<|object_ref_end|>",
    "<|box_start|>",
    "<|box_end|>",
    "<|quad_start|>",
    "<|quad_end|>",
    "<|vision_start|>",
    "<|vision_end|>",
    "<|vision_pad|>",
    "<|image_pad|>",
    "<|video_pad|>"
  ],
  "eos_token": {
    "content": "<|im_end|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
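
A short check, as a sketch assuming a local checkout, that these declarations surface as the tokenizer's eos/pad attributes:

```python
# Sketch: the eos/pad tokens declared above, as exposed on the loaded tokenizer.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./")  # placeholder: local checkout of this repo
print(tok.eos_token, tok.eos_token_id)  # expected: <|im_end|> 151645
print(tok.pad_token, tok.pad_token_id)  # expected: <|endoftext|> 151643
```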

tokenizer.json ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa
size 11421896

tokenizer_config.json ADDED
@@ -0,0 +1,209 @@
| 1 |
+
{
|
| 2 |
+
"add_bos_token": false,
|
| 3 |
+
"add_prefix_space": false,
|
| 4 |
+
"added_tokens_decoder": {
|
| 5 |
+
"151643": {
|
| 6 |
+
"content": "<|endoftext|>",
|
| 7 |
+
"lstrip": false,
|
| 8 |
+
"normalized": false,
|
| 9 |
+
"rstrip": false,
|
| 10 |
+
"single_word": false,
|
| 11 |
+
"special": true
|
| 12 |
+
},
|
| 13 |
+
"151644": {
|
| 14 |
+
"content": "<|im_start|>",
|
| 15 |
+
"lstrip": false,
|
| 16 |
+
"normalized": false,
|
| 17 |
+
"rstrip": false,
|
| 18 |
+
"single_word": false,
|
| 19 |
+
"special": true
|
| 20 |
+
},
|
| 21 |
+
"151645": {
|
| 22 |
+
"content": "<|im_end|>",
|
| 23 |
+
"lstrip": false,
|
| 24 |
+
"normalized": false,
|
| 25 |
+
"rstrip": false,
|
| 26 |
+
"single_word": false,
|
| 27 |
+
"special": true
|
| 28 |
+
},
|
| 29 |
+
"151646": {
|
| 30 |
+
"content": "<|object_ref_start|>",
|
| 31 |
+
"lstrip": false,
|
| 32 |
+
"normalized": false,
|
| 33 |
+
"rstrip": false,
|
| 34 |
+
"single_word": false,
|
| 35 |
+
"special": true
|
| 36 |
+
},
|
| 37 |
+
"151647": {
|
| 38 |
+
"content": "<|object_ref_end|>",
|
| 39 |
+
"lstrip": false,
|
| 40 |
+
"normalized": false,
|
| 41 |
+
"rstrip": false,
|
| 42 |
+
"single_word": false,
|
| 43 |
+
"special": true
|
| 44 |
+
},
|
| 45 |
+
"151648": {
|
| 46 |
+
"content": "<|box_start|>",
|
| 47 |
+
"lstrip": false,
|
| 48 |
+
"normalized": false,
|
| 49 |
+
"rstrip": false,
|
| 50 |
+
"single_word": false,
|
| 51 |
+
"special": true
|
| 52 |
+
},
|
| 53 |
+
"151649": {
|
| 54 |
+
"content": "<|box_end|>",
|
| 55 |
+
"lstrip": false,
|
| 56 |
+
"normalized": false,
|
| 57 |
+
"rstrip": false,
|
| 58 |
+
"single_word": false,
|
| 59 |
+
"special": true
|
| 60 |
+
},
|
| 61 |
+
"151650": {
|
| 62 |
+
"content": "<|quad_start|>",
|
| 63 |
+
"lstrip": false,
|
| 64 |
+
"normalized": false,
|
| 65 |
+
"rstrip": false,
|
| 66 |
+
"single_word": false,
|
| 67 |
+
"special": true
|
| 68 |
+
},
|
| 69 |
+
"151651": {
|
| 70 |
+
"content": "<|quad_end|>",
|
| 71 |
+
"lstrip": false,
|
| 72 |
+
"normalized": false,
|
| 73 |
+
"rstrip": false,
|
| 74 |
+
"single_word": false,
|
| 75 |
+
"special": true
|
| 76 |
+
},
|
| 77 |
+
"151652": {
|
| 78 |
+
"content": "<|vision_start|>",
|
| 79 |
+
"lstrip": false,
|
| 80 |
+
"normalized": false,
|
| 81 |
+
"rstrip": false,
|
| 82 |
+
"single_word": false,
|
| 83 |
+
"special": true
|
| 84 |
+
},
|
| 85 |
+
"151653": {
|
| 86 |
+
"content": "<|vision_end|>",
|
| 87 |
+
"lstrip": false,
|
| 88 |
+
"normalized": false,
|
| 89 |
+
"rstrip": false,
|
| 90 |
+
"single_word": false,
|
| 91 |
+
"special": true
|
| 92 |
+
},
|
| 93 |
+
"151654": {
|
| 94 |
+
"content": "<|vision_pad|>",
|
| 95 |
+
"lstrip": false,
|
| 96 |
+
"normalized": false,
|
| 97 |
+
"rstrip": false,
|
| 98 |
+
"single_word": false,
|
| 99 |
+
"special": true
|
| 100 |
+
},
|
| 101 |
+
"151655": {
|
| 102 |
+
"content": "<|image_pad|>",
|
| 103 |
+
"lstrip": false,
|
| 104 |
+
"normalized": false,
|
| 105 |
+
"rstrip": false,
|
| 106 |
+
"single_word": false,
|
| 107 |
+
"special": true
|
| 108 |
+
},
|
| 109 |
+
"151656": {
|
| 110 |
+
"content": "<|video_pad|>",
|
| 111 |
+
"lstrip": false,
|
| 112 |
+
"normalized": false,
|
| 113 |
+
"rstrip": false,
|
| 114 |
+
"single_word": false,
|
| 115 |
+
"special": true
|
| 116 |
+
},
|
| 117 |
+
"151657": {
|
| 118 |
+
"content": "<tool_call>",
|
| 119 |
+
"lstrip": false,
|
| 120 |
+
"normalized": false,
|
| 121 |
+
"rstrip": false,
|
| 122 |
+
"single_word": false,
|
| 123 |
+
"special": false
|
| 124 |
+
},
|
| 125 |
+
"151658": {
|
| 126 |
+
"content": "</tool_call>",
|
| 127 |
+
"lstrip": false,
|
| 128 |
+
"normalized": false,
|
| 129 |
+
"rstrip": false,
|
| 130 |
+
"single_word": false,
|
| 131 |
+
"special": false
|
| 132 |
+
},
|
| 133 |
+
"151659": {
|
| 134 |
+
"content": "<|fim_prefix|>",
|
| 135 |
+
"lstrip": false,
|
| 136 |
+
"normalized": false,
|
| 137 |
+
"rstrip": false,
|
| 138 |
+
"single_word": false,
|
| 139 |
+
"special": false
|
| 140 |
+
},
|
| 141 |
+
"151660": {
|
| 142 |
+
"content": "<|fim_middle|>",
|
| 143 |
+
"lstrip": false,
|
| 144 |
+
"normalized": false,
|
| 145 |
+
"rstrip": false,
|
| 146 |
+
"single_word": false,
|
| 147 |
+
"special": false
|
| 148 |
+
},
|
| 149 |
+
"151661": {
|
| 150 |
+
"content": "<|fim_suffix|>",
|
| 151 |
+
"lstrip": false,
|
| 152 |
+
"normalized": false,
|
| 153 |
+
"rstrip": false,
|
| 154 |
+
"single_word": false,
|
| 155 |
+
"special": false
|
| 156 |
+
},
|
| 157 |
+
"151662": {
|
| 158 |
+
"content": "<|fim_pad|>",
|
| 159 |
+
"lstrip": false,
|
| 160 |
+
"normalized": false,
|
| 161 |
+
"rstrip": false,
|
| 162 |
+
"single_word": false,
|
| 163 |
+
"special": false
|
| 164 |
+
},
|
| 165 |
+
"151663": {
|
| 166 |
+
"content": "<|repo_name|>",
|
| 167 |
+
"lstrip": false,
|
| 168 |
+
"normalized": false,
|
| 169 |
+
"rstrip": false,
|
| 170 |
+
"single_word": false,
|
| 171 |
+
"special": false
|
| 172 |
+
},
|
| 173 |
+
"151664": {
|
| 174 |
+
"content": "<|file_sep|>",
|
| 175 |
+
"lstrip": false,
|
| 176 |
+
"normalized": false,
|
| 177 |
+
"rstrip": false,
|
| 178 |
+
"single_word": false,
|
| 179 |
+
"special": false
|
| 180 |
+
}
|
| 181 |
+
},
|
| 182 |
+
"additional_special_tokens": [
|
| 183 |
+
"<|im_start|>",
|
| 184 |
+
"<|im_end|>",
|
| 185 |
+
"<|object_ref_start|>",
|
| 186 |
+
"<|object_ref_end|>",
|
| 187 |
+
"<|box_start|>",
|
| 188 |
+
"<|box_end|>",
|
| 189 |
+
"<|quad_start|>",
|
| 190 |
+
"<|quad_end|>",
|
| 191 |
+
"<|vision_start|>",
|
| 192 |
+
"<|vision_end|>",
|
| 193 |
+
"<|vision_pad|>",
|
| 194 |
+
"<|image_pad|>",
|
| 195 |
+
"<|video_pad|>"
|
| 196 |
+
],
|
| 197 |
+
"bos_token": null,
|
| 198 |
+
"chat_template": "{%- if tools %}\n {{- '<|im_start|>system\\n' }}\n {%- if messages[0]['role'] == 'system' %}\n {{- messages[0]['content'] }}\n {%- else %}\n {{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }}\n {%- endif %}\n {{- \"\\n\\n# Tools\\n\\nYou may call one or more functions to assist with the user query.\\n\\nYou are provided with function signatures within <tools></tools> XML tags:\\n<tools>\" }}\n {%- for tool in tools %}\n {{- \"\\n\" }}\n {{- tool | tojson }}\n {%- endfor %}\n {{- \"\\n</tools>\\n\\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\\n<tool_call>\\n{\\\"name\\\": <function-name>, \\\"arguments\\\": <args-json-object>}\\n</tool_call><|im_end|>\\n\" }}\n{%- else %}\n {%- if messages[0]['role'] == 'system' %}\n {{- '<|im_start|>system\\n' + messages[0]['content'] + '<|im_end|>\\n' }}\n {%- else %}\n {{- '<|im_start|>system\\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\\n' }}\n {%- endif %}\n{%- endif %}\n{%- for message in messages %}\n {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) or (message.role == \"assistant\" and not message.tool_calls) %}\n {{- '<|im_start|>' + message.role + '\\n' + message.content + '<|im_end|>' + '\\n' }}\n {%- elif message.role == \"assistant\" %}\n {{- '<|im_start|>' + message.role }}\n {%- if message.content %}\n {{- '\\n' + message.content }}\n {%- endif %}\n {%- for tool_call in message.tool_calls %}\n {%- if tool_call.function is defined %}\n {%- set tool_call = tool_call.function %}\n {%- endif %}\n {{- '\\n<tool_call>\\n{\"name\": \"' }}\n {{- tool_call.name }}\n {{- '\", \"arguments\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- '}\\n</tool_call>' }}\n {%- endfor %}\n {{- '<|im_end|>\\n' }}\n {%- elif message.role == \"tool\" %}\n {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != \"tool\") %}\n {{- '<|im_start|>user' }}\n {%- endif %}\n {{- '\\n<tool_response>\\n' }}\n {{- message.content }}\n {{- '\\n</tool_response>' }}\n {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\n {{- '<|im_end|>\\n' }}\n {%- endif %}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|im_start|>assistant\\n' }}\n{%- endif %}\n",
"clean_up_tokenization_spaces": false,
"eos_token": "<|im_end|>",
"errors": "replace",
"extra_special_tokens": {},
"model_max_length": 32768,
"pad_token": "<|endoftext|>",
"padding_side": "right",
"split_special_tokens": false,
"tokenizer_class": "Qwen2Tokenizer",
"unk_token": null
}
|
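The tokenizer_config.json entries above register the ChatML-style control tokens, set `<|im_end|>` as the EOS token and `<|endoftext|>` as the padding token, and store the Qwen chat template. A minimal usage sketch with Hugging Face Transformers is below; the repo id is a placeholder for this checkpoint, and the prompt and generation settings are illustrative assumptions rather than values taken from this commit (device_map="auto" additionally requires accelerate).

```python
# Minimal sketch (assumed usage): load the saved tokenizer/model and build a
# ChatML prompt via the chat_template stored in tokenizer_config.json.
# The repo id below is a placeholder for this checkpoint, not a published name.
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "ex52_qwen2.5_3b_101k_16kcw_3ep_cuda_amd_os_4090"  # hypothetical local path or hub id

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(repo_id, torch_dtype="auto", device_map="auto")

messages = [{"role": "user", "content": "Write a CUDA kernel that adds two vectors."}]
# apply_chat_template renders the Jinja template above and, because
# add_generation_prompt=True, ends the prompt with "<|im_start|>assistant\n".
inputs = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)

outputs = model.generate(inputs, max_new_tokens=256, eos_token_id=tokenizer.eos_token_id)
print(tokenizer.decode(outputs[0][inputs.shape[-1]:], skip_special_tokens=True))
```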
trainer_log.jsonl
ADDED
|
@@ -0,0 +1,241 @@
|
| 1 |
+
{"current_steps": 10, "total_steps": 2409, "loss": 0.637, "lr": 7.468879668049793e-07, "epoch": 0.012439744985227803, "percentage": 0.42, "elapsed_time": "0:11:12", "remaining_time": "1 day, 20:47:31"}
|
| 2 |
+
{"current_steps": 20, "total_steps": 2409, "loss": 0.5821, "lr": 1.5767634854771784e-06, "epoch": 0.024879489970455606, "percentage": 0.83, "elapsed_time": "0:22:20", "remaining_time": "1 day, 20:28:33"}
|
| 3 |
+
{"current_steps": 30, "total_steps": 2409, "loss": 0.425, "lr": 2.4066390041493776e-06, "epoch": 0.03731923495568341, "percentage": 1.25, "elapsed_time": "0:33:36", "remaining_time": "1 day, 20:24:55"}
|
| 4 |
+
{"current_steps": 40, "total_steps": 2409, "loss": 0.2552, "lr": 3.2365145228215773e-06, "epoch": 0.04975897994091121, "percentage": 1.66, "elapsed_time": "0:44:48", "remaining_time": "1 day, 20:13:57"}
|
| 5 |
+
{"current_steps": 50, "total_steps": 2409, "loss": 0.1621, "lr": 4.0663900414937765e-06, "epoch": 0.06219872492613902, "percentage": 2.08, "elapsed_time": "0:56:06", "remaining_time": "1 day, 20:06:58"}
|
| 6 |
+
{"current_steps": 60, "total_steps": 2409, "loss": 0.1242, "lr": 4.896265560165976e-06, "epoch": 0.07463846991136681, "percentage": 2.49, "elapsed_time": "1:07:03", "remaining_time": "1 day, 19:45:16"}
|
| 7 |
+
{"current_steps": 70, "total_steps": 2409, "loss": 0.0994, "lr": 5.726141078838174e-06, "epoch": 0.08707821489659462, "percentage": 2.91, "elapsed_time": "1:18:24", "remaining_time": "1 day, 19:40:05"}
|
| 8 |
+
{"current_steps": 80, "total_steps": 2409, "loss": 0.0867, "lr": 6.556016597510374e-06, "epoch": 0.09951795988182242, "percentage": 3.32, "elapsed_time": "1:29:38", "remaining_time": "1 day, 19:29:44"}
|
| 9 |
+
{"current_steps": 90, "total_steps": 2409, "loss": 0.0783, "lr": 7.385892116182573e-06, "epoch": 0.11195770486705023, "percentage": 3.74, "elapsed_time": "1:41:07", "remaining_time": "1 day, 19:25:40"}
|
| 10 |
+
{"current_steps": 100, "total_steps": 2409, "loss": 0.0659, "lr": 8.215767634854772e-06, "epoch": 0.12439744985227803, "percentage": 4.15, "elapsed_time": "1:52:19", "remaining_time": "1 day, 19:13:38"}
|
| 11 |
+
{"current_steps": 110, "total_steps": 2409, "loss": 0.0622, "lr": 9.045643153526971e-06, "epoch": 0.13683719483750584, "percentage": 4.57, "elapsed_time": "2:03:30", "remaining_time": "1 day, 19:01:29"}
|
| 12 |
+
{"current_steps": 120, "total_steps": 2409, "loss": 0.0594, "lr": 9.875518672199172e-06, "epoch": 0.14927693982273363, "percentage": 4.98, "elapsed_time": "2:14:53", "remaining_time": "1 day, 18:52:54"}
|
| 13 |
+
{"current_steps": 130, "total_steps": 2409, "loss": 0.0586, "lr": 1.070539419087137e-05, "epoch": 0.16171668480796145, "percentage": 5.4, "elapsed_time": "2:26:07", "remaining_time": "1 day, 18:41:42"}
|
| 14 |
+
{"current_steps": 140, "total_steps": 2409, "loss": 0.0484, "lr": 1.1535269709543569e-05, "epoch": 0.17415642979318924, "percentage": 5.81, "elapsed_time": "2:37:33", "remaining_time": "1 day, 18:33:31"}
|
| 15 |
+
{"current_steps": 150, "total_steps": 2409, "loss": 0.0509, "lr": 1.236514522821577e-05, "epoch": 0.18659617477841703, "percentage": 6.23, "elapsed_time": "2:48:40", "remaining_time": "1 day, 18:20:09"}
|
| 16 |
+
{"current_steps": 160, "total_steps": 2409, "loss": 0.0481, "lr": 1.3195020746887967e-05, "epoch": 0.19903591976364485, "percentage": 6.64, "elapsed_time": "2:59:50", "remaining_time": "1 day, 18:07:51"}
|
| 17 |
+
{"current_steps": 170, "total_steps": 2409, "loss": 0.0488, "lr": 1.4024896265560166e-05, "epoch": 0.21147566474887264, "percentage": 7.06, "elapsed_time": "3:10:53", "remaining_time": "1 day, 17:54:12"}
|
| 18 |
+
{"current_steps": 180, "total_steps": 2409, "loss": 0.0413, "lr": 1.4854771784232367e-05, "epoch": 0.22391540973410046, "percentage": 7.47, "elapsed_time": "3:21:55", "remaining_time": "1 day, 17:40:32"}
|
| 19 |
+
{"current_steps": 190, "total_steps": 2409, "loss": 0.0422, "lr": 1.5684647302904566e-05, "epoch": 0.23635515471932825, "percentage": 7.89, "elapsed_time": "3:33:06", "remaining_time": "1 day, 17:28:51"}
|
| 20 |
+
{"current_steps": 200, "total_steps": 2409, "loss": 0.0397, "lr": 1.6514522821576764e-05, "epoch": 0.24879489970455607, "percentage": 8.3, "elapsed_time": "3:44:18", "remaining_time": "1 day, 17:17:26"}
|
| 21 |
+
{"current_steps": 210, "total_steps": 2409, "loss": 0.0425, "lr": 1.7344398340248965e-05, "epoch": 0.26123464468978386, "percentage": 8.72, "elapsed_time": "3:55:24", "remaining_time": "1 day, 17:04:58"}
|
| 22 |
+
{"current_steps": 220, "total_steps": 2409, "loss": 0.0375, "lr": 1.8174273858921162e-05, "epoch": 0.2736743896750117, "percentage": 9.13, "elapsed_time": "4:06:35", "remaining_time": "1 day, 16:53:34"}
|
| 23 |
+
{"current_steps": 230, "total_steps": 2409, "loss": 0.0388, "lr": 1.9004149377593363e-05, "epoch": 0.28611413466023944, "percentage": 9.55, "elapsed_time": "4:17:45", "remaining_time": "1 day, 16:42:01"}
|
| 24 |
+
{"current_steps": 240, "total_steps": 2409, "loss": 0.0372, "lr": 1.983402489626556e-05, "epoch": 0.29855387964546726, "percentage": 9.96, "elapsed_time": "4:28:41", "remaining_time": "1 day, 16:28:17"}
|
| 25 |
+
{"current_steps": 250, "total_steps": 2409, "loss": 0.0341, "lr": 1.9999328066483867e-05, "epoch": 0.3109936246306951, "percentage": 10.38, "elapsed_time": "4:39:53", "remaining_time": "1 day, 16:17:11"}
|
| 26 |
+
{"current_steps": 260, "total_steps": 2409, "loss": 0.0367, "lr": 1.99965984913342e-05, "epoch": 0.3234333696159229, "percentage": 10.79, "elapsed_time": "4:51:07", "remaining_time": "1 day, 16:06:17"}
|
| 27 |
+
{"current_steps": 270, "total_steps": 2409, "loss": 0.0337, "lr": 1.999176985141578e-05, "epoch": 0.33587311460115066, "percentage": 11.21, "elapsed_time": "5:02:31", "remaining_time": "1 day, 15:56:43"}
|
| 28 |
+
{"current_steps": 280, "total_steps": 2409, "loss": 0.0308, "lr": 1.998484316063629e-05, "epoch": 0.3483128595863785, "percentage": 11.62, "elapsed_time": "5:13:43", "remaining_time": "1 day, 15:45:23"}
|
| 29 |
+
{"current_steps": 290, "total_steps": 2409, "loss": 0.0328, "lr": 1.997581987344772e-05, "epoch": 0.3607526045716063, "percentage": 12.04, "elapsed_time": "5:24:46", "remaining_time": "1 day, 15:33:04"}
|
| 30 |
+
{"current_steps": 300, "total_steps": 2409, "loss": 0.0301, "lr": 1.9964701884540964e-05, "epoch": 0.37319234955683406, "percentage": 12.45, "elapsed_time": "5:35:55", "remaining_time": "1 day, 15:21:34"}
|
| 31 |
+
{"current_steps": 310, "total_steps": 2409, "loss": 0.0314, "lr": 1.9951491528448005e-05, "epoch": 0.3856320945420619, "percentage": 12.87, "elapsed_time": "5:47:01", "remaining_time": "1 day, 15:09:40"}
|
| 32 |
+
{"current_steps": 320, "total_steps": 2409, "loss": 0.0293, "lr": 1.993619157905169e-05, "epoch": 0.3980718395272897, "percentage": 13.28, "elapsed_time": "5:58:21", "remaining_time": "1 day, 14:59:27"}
|
| 33 |
+
{"current_steps": 330, "total_steps": 2409, "loss": 0.0312, "lr": 1.9918805249003272e-05, "epoch": 0.4105115845125175, "percentage": 13.7, "elapsed_time": "6:09:44", "remaining_time": "1 day, 14:49:23"}
|
| 34 |
+
{"current_steps": 340, "total_steps": 2409, "loss": 0.0287, "lr": 1.9899336189047846e-05, "epoch": 0.4229513294977453, "percentage": 14.11, "elapsed_time": "6:20:45", "remaining_time": "1 day, 14:37:01"}
|
| 35 |
+
{"current_steps": 350, "total_steps": 2409, "loss": 0.0269, "lr": 1.987778848725775e-05, "epoch": 0.4353910744829731, "percentage": 14.53, "elapsed_time": "6:32:08", "remaining_time": "1 day, 14:26:53"}
|
| 36 |
+
{"current_steps": 360, "total_steps": 2409, "loss": 0.03, "lr": 1.985416666817419e-05, "epoch": 0.4478308194682009, "percentage": 14.94, "elapsed_time": "6:43:11", "remaining_time": "1 day, 14:14:50"}
|
| 37 |
+
{"current_steps": 370, "total_steps": 2409, "loss": 0.0271, "lr": 1.9828475691857148e-05, "epoch": 0.46027056445342873, "percentage": 15.36, "elapsed_time": "6:54:26", "remaining_time": "1 day, 14:03:55"}
|
| 38 |
+
{"current_steps": 380, "total_steps": 2409, "loss": 0.0263, "lr": 1.9800720952843906e-05, "epoch": 0.4727103094386565, "percentage": 15.77, "elapsed_time": "7:05:44", "remaining_time": "1 day, 13:53:11"}
|
| 39 |
+
{"current_steps": 390, "total_steps": 2409, "loss": 0.0263, "lr": 1.977090827901631e-05, "epoch": 0.4851500544238843, "percentage": 16.19, "elapsed_time": "7:17:03", "remaining_time": "1 day, 13:42:39"}
|
| 40 |
+
{"current_steps": 400, "total_steps": 2409, "loss": 0.0265, "lr": 1.973904393037703e-05, "epoch": 0.49758979940911213, "percentage": 16.6, "elapsed_time": "7:28:10", "remaining_time": "1 day, 13:30:58"}
|
| 41 |
+
{"current_steps": 410, "total_steps": 2409, "loss": 0.0274, "lr": 1.9705134597735113e-05, "epoch": 0.5100295443943399, "percentage": 17.02, "elapsed_time": "7:39:34", "remaining_time": "1 day, 13:20:44"}
|
| 42 |
+
{"current_steps": 420, "total_steps": 2409, "loss": 0.0285, "lr": 1.966918740130106e-05, "epoch": 0.5224692893795677, "percentage": 17.43, "elapsed_time": "7:50:52", "remaining_time": "1 day, 13:09:57"}
|
| 43 |
+
{"current_steps": 430, "total_steps": 2409, "loss": 0.0256, "lr": 1.9631209889191712e-05, "epoch": 0.5349090343647955, "percentage": 17.85, "elapsed_time": "8:01:59", "remaining_time": "1 day, 12:58:18"}
|
| 44 |
+
{"current_steps": 440, "total_steps": 2409, "loss": 0.025, "lr": 1.959121003584536e-05, "epoch": 0.5473487793500234, "percentage": 18.26, "elapsed_time": "8:13:18", "remaining_time": "1 day, 12:47:33"}
|
| 45 |
+
{"current_steps": 450, "total_steps": 2409, "loss": 0.0258, "lr": 1.954919624034725e-05, "epoch": 0.5597885243352512, "percentage": 18.68, "elapsed_time": "8:24:39", "remaining_time": "1 day, 12:36:58"}
|
| 46 |
+
{"current_steps": 460, "total_steps": 2409, "loss": 0.0226, "lr": 1.950517732466597e-05, "epoch": 0.5722282693204789, "percentage": 19.1, "elapsed_time": "8:35:38", "remaining_time": "1 day, 12:24:47"}
|
| 47 |
+
{"current_steps": 470, "total_steps": 2409, "loss": 0.0243, "lr": 1.9459162531801048e-05, "epoch": 0.5846680143057067, "percentage": 19.51, "elapsed_time": "8:46:58", "remaining_time": "1 day, 12:14:02"}
|
| 48 |
+
{"current_steps": 480, "total_steps": 2409, "loss": 0.0232, "lr": 1.94111615238421e-05, "epoch": 0.5971077592909345, "percentage": 19.93, "elapsed_time": "8:58:09", "remaining_time": "1 day, 12:02:42"}
|
| 49 |
+
{"current_steps": 490, "total_steps": 2409, "loss": 0.025, "lr": 1.936118437994003e-05, "epoch": 0.6095475042761623, "percentage": 20.34, "elapsed_time": "9:09:11", "remaining_time": "1 day, 11:50:47"}
|
| 50 |
+
{"current_steps": 500, "total_steps": 2409, "loss": 0.0247, "lr": 1.9309241594190614e-05, "epoch": 0.6219872492613902, "percentage": 20.76, "elapsed_time": "9:20:37", "remaining_time": "1 day, 11:40:26"}
|
| 51 |
+
{"current_steps": 510, "total_steps": 2409, "loss": 0.0219, "lr": 1.925534407343097e-05, "epoch": 0.634426994246618, "percentage": 21.17, "elapsed_time": "9:31:50", "remaining_time": "1 day, 11:29:15"}
|
| 52 |
+
{"current_steps": 520, "total_steps": 2409, "loss": 0.0222, "lr": 1.919950313494939e-05, "epoch": 0.6468667392318458, "percentage": 21.59, "elapsed_time": "9:43:01", "remaining_time": "1 day, 11:17:56"}
|
| 53 |
+
{"current_steps": 530, "total_steps": 2409, "loss": 0.0234, "lr": 1.9141730504108923e-05, "epoch": 0.6593064842170735, "percentage": 22.0, "elapsed_time": "9:54:07", "remaining_time": "1 day, 11:06:20"}
|
| 54 |
+
{"current_steps": 540, "total_steps": 2409, "loss": 0.0243, "lr": 1.9082038311885326e-05, "epoch": 0.6717462292023013, "percentage": 22.42, "elapsed_time": "10:05:18", "remaining_time": "1 day, 10:55:02"}
|
| 55 |
+
{"current_steps": 550, "total_steps": 2409, "loss": 0.0229, "lr": 1.902043909231984e-05, "epoch": 0.6841859741875291, "percentage": 22.83, "elapsed_time": "10:16:34", "remaining_time": "1 day, 10:44:02"}
|
| 56 |
+
{"current_steps": 560, "total_steps": 2409, "loss": 0.0233, "lr": 1.8956945779887288e-05, "epoch": 0.696625719172757, "percentage": 23.25, "elapsed_time": "10:27:36", "remaining_time": "1 day, 10:32:13"}
|
| 57 |
+
{"current_steps": 570, "total_steps": 2409, "loss": 0.0219, "lr": 1.889157170678015e-05, "epoch": 0.7090654641579848, "percentage": 23.66, "elapsed_time": "10:38:54", "remaining_time": "1 day, 10:21:18"}
|
| 58 |
+
{"current_steps": 580, "total_steps": 2409, "loss": 0.0245, "lr": 1.882433060010907e-05, "epoch": 0.7215052091432126, "percentage": 24.08, "elapsed_time": "10:50:14", "remaining_time": "1 day, 10:10:28"}
|
| 59 |
+
{"current_steps": 590, "total_steps": 2409, "loss": 0.0212, "lr": 1.8755236579020503e-05, "epoch": 0.7339449541284404, "percentage": 24.49, "elapsed_time": "11:01:32", "remaining_time": "1 day, 9:59:35"}
|
| 60 |
+
{"current_steps": 600, "total_steps": 2409, "loss": 0.0226, "lr": 1.868430415173196e-05, "epoch": 0.7463846991136681, "percentage": 24.91, "elapsed_time": "11:12:40", "remaining_time": "1 day, 9:48:08"}
|
| 61 |
+
{"current_steps": 610, "total_steps": 2409, "loss": 0.0237, "lr": 1.861154821248565e-05, "epoch": 0.7588244440988959, "percentage": 25.32, "elapsed_time": "11:23:46", "remaining_time": "1 day, 9:36:35"}
|
| 62 |
+
{"current_steps": 620, "total_steps": 2409, "loss": 0.022, "lr": 1.8536984038420972e-05, "epoch": 0.7712641890841238, "percentage": 25.74, "elapsed_time": "11:34:56", "remaining_time": "1 day, 9:25:14"}
|
| 63 |
+
{"current_steps": 630, "total_steps": 2409, "loss": 0.0218, "lr": 1.84606272863667e-05, "epoch": 0.7837039340693516, "percentage": 26.15, "elapsed_time": "11:46:14", "remaining_time": "1 day, 9:14:16"}
|
| 64 |
+
{"current_steps": 640, "total_steps": 2409, "loss": 0.0213, "lr": 1.8382493989553352e-05, "epoch": 0.7961436790545794, "percentage": 26.57, "elapsed_time": "11:57:08", "remaining_time": "1 day, 9:02:13"}
|
| 65 |
+
{"current_steps": 650, "total_steps": 2409, "loss": 0.0217, "lr": 1.83026005542466e-05, "epoch": 0.8085834240398072, "percentage": 26.98, "elapsed_time": "12:08:23", "remaining_time": "1 day, 8:51:08"}
|
| 66 |
+
{"current_steps": 660, "total_steps": 2409, "loss": 0.019, "lr": 1.8220963756302294e-05, "epoch": 0.821023169025035, "percentage": 27.4, "elapsed_time": "12:19:37", "remaining_time": "1 day, 8:39:59"}
|
| 67 |
+
{"current_steps": 670, "total_steps": 2409, "loss": 0.0217, "lr": 1.8137600737643915e-05, "epoch": 0.8334629140102627, "percentage": 27.81, "elapsed_time": "12:30:57", "remaining_time": "1 day, 8:29:08"}
|
| 68 |
+
{"current_steps": 680, "total_steps": 2409, "loss": 0.0216, "lr": 1.8052529002663142e-05, "epoch": 0.8459026589954906, "percentage": 28.23, "elapsed_time": "12:42:17", "remaining_time": "1 day, 8:18:14"}
|
| 69 |
+
{"current_steps": 690, "total_steps": 2409, "loss": 0.0207, "lr": 1.7965766414544328e-05, "epoch": 0.8583424039807184, "percentage": 28.64, "elapsed_time": "12:53:32", "remaining_time": "1 day, 8:07:07"}
|
| 70 |
+
{"current_steps": 700, "total_steps": 2409, "loss": 0.0204, "lr": 1.7877331191513615e-05, "epoch": 0.8707821489659462, "percentage": 29.06, "elapsed_time": "13:04:57", "remaining_time": "1 day, 7:56:26"}
|
| 71 |
+
{"current_steps": 710, "total_steps": 2409, "loss": 0.0208, "lr": 1.778724190301351e-05, "epoch": 0.883221893951174, "percentage": 29.47, "elapsed_time": "13:16:23", "remaining_time": "1 day, 7:45:44"}
|
| 72 |
+
{"current_steps": 720, "total_steps": 2409, "loss": 0.0203, "lr": 1.769551746580372e-05, "epoch": 0.8956616389364018, "percentage": 29.89, "elapsed_time": "13:27:32", "remaining_time": "1 day, 7:34:20"}
|
| 73 |
+
{"current_steps": 730, "total_steps": 2409, "loss": 0.02, "lr": 1.7602177139989046e-05, "epoch": 0.9081013839216296, "percentage": 30.3, "elapsed_time": "13:38:50", "remaining_time": "1 day, 7:23:20"}
|
| 74 |
+
{"current_steps": 740, "total_steps": 2409, "loss": 0.0193, "lr": 1.750724052497518e-05, "epoch": 0.9205411289068575, "percentage": 30.72, "elapsed_time": "13:50:02", "remaining_time": "1 day, 7:12:05"}
|
| 75 |
+
{"current_steps": 750, "total_steps": 2409, "loss": 0.0187, "lr": 1.7410727555353282e-05, "epoch": 0.9329808738920852, "percentage": 31.13, "elapsed_time": "14:01:21", "remaining_time": "1 day, 7:01:04"}
|
| 76 |
+
{"current_steps": 760, "total_steps": 2409, "loss": 0.0206, "lr": 1.731265849671413e-05, "epoch": 0.945420618877313, "percentage": 31.55, "elapsed_time": "14:12:24", "remaining_time": "1 day, 6:49:30"}
|
| 77 |
+
{"current_steps": 770, "total_steps": 2409, "loss": 0.0208, "lr": 1.721305394139282e-05, "epoch": 0.9578603638625408, "percentage": 31.96, "elapsed_time": "14:23:46", "remaining_time": "1 day, 6:38:36"}
|
| 78 |
+
{"current_steps": 780, "total_steps": 2409, "loss": 0.0196, "lr": 1.7111934804144804e-05, "epoch": 0.9703001088477686, "percentage": 32.38, "elapsed_time": "14:34:53", "remaining_time": "1 day, 6:27:11"}
|
| 79 |
+
{"current_steps": 790, "total_steps": 2409, "loss": 0.0197, "lr": 1.700932231775428e-05, "epoch": 0.9827398538329964, "percentage": 32.79, "elapsed_time": "14:46:00", "remaining_time": "1 day, 6:15:44"}
|
| 80 |
+
{"current_steps": 800, "total_steps": 2409, "loss": 0.0176, "lr": 1.6905238028575764e-05, "epoch": 0.9951795988182243, "percentage": 33.21, "elapsed_time": "14:57:03", "remaining_time": "1 day, 6:04:11"}
|
| 81 |
+
{"current_steps": 810, "total_steps": 2409, "loss": 0.0194, "lr": 1.679970379200983e-05, "epoch": 1.0074638469911368, "percentage": 33.62, "elapsed_time": "15:08:08", "remaining_time": "1 day, 5:52:44"}
|
| 82 |
+
{"current_steps": 820, "total_steps": 2409, "loss": 0.0179, "lr": 1.669274176791395e-05, "epoch": 1.0199035919763644, "percentage": 34.04, "elapsed_time": "15:19:26", "remaining_time": "1 day, 5:41:41"}
|
| 83 |
+
{"current_steps": 830, "total_steps": 2409, "loss": 0.0174, "lr": 1.6584374415949446e-05, "epoch": 1.0323433369615922, "percentage": 34.45, "elapsed_time": "15:30:44", "remaining_time": "1 day, 5:30:40"}
|
| 84 |
+
{"current_steps": 840, "total_steps": 2409, "loss": 0.0172, "lr": 1.6474624490865417e-05, "epoch": 1.04478308194682, "percentage": 34.87, "elapsed_time": "15:42:00", "remaining_time": "1 day, 5:19:32"}
|
| 85 |
+
{"current_steps": 850, "total_steps": 2409, "loss": 0.0178, "lr": 1.6363515037720774e-05, "epoch": 1.0572228269320478, "percentage": 35.28, "elapsed_time": "15:53:13", "remaining_time": "1 day, 5:08:19"}
|
| 86 |
+
{"current_steps": 860, "total_steps": 2409, "loss": 0.017, "lr": 1.6251069387045277e-05, "epoch": 1.0696625719172757, "percentage": 35.7, "elapsed_time": "16:04:37", "remaining_time": "1 day, 4:57:26"}
|
| 87 |
+
{"current_steps": 870, "total_steps": 2409, "loss": 0.0197, "lr": 1.6137311149940633e-05, "epoch": 1.0821023169025035, "percentage": 36.11, "elapsed_time": "16:15:59", "remaining_time": "1 day, 4:46:30"}
|
| 88 |
+
{"current_steps": 880, "total_steps": 2409, "loss": 0.0169, "lr": 1.6022264213122694e-05, "epoch": 1.0945420618877313, "percentage": 36.53, "elapsed_time": "16:27:08", "remaining_time": "1 day, 4:35:10"}
|
| 89 |
+
{"current_steps": 890, "total_steps": 2409, "loss": 0.0182, "lr": 1.5905952733905777e-05, "epoch": 1.106981806872959, "percentage": 36.94, "elapsed_time": "16:38:20", "remaining_time": "1 day, 4:23:54"}
|
| 90 |
+
{"current_steps": 900, "total_steps": 2409, "loss": 0.019, "lr": 1.578840113513015e-05, "epoch": 1.119421551858187, "percentage": 37.36, "elapsed_time": "16:49:31", "remaining_time": "1 day, 4:12:38"}
|
| 91 |
+
{"current_steps": 910, "total_steps": 2409, "loss": 0.0177, "lr": 1.5669634100033798e-05, "epoch": 1.1318612968434147, "percentage": 37.78, "elapsed_time": "17:00:38", "remaining_time": "1 day, 4:01:14"}
|
| 92 |
+
{"current_steps": 920, "total_steps": 2409, "loss": 0.0174, "lr": 1.5549676567069448e-05, "epoch": 1.1443010418286426, "percentage": 38.19, "elapsed_time": "17:12:01", "remaining_time": "1 day, 3:50:19"}
|
| 93 |
+
{"current_steps": 930, "total_steps": 2409, "loss": 0.0181, "lr": 1.5428553724668103e-05, "epoch": 1.1567407868138704, "percentage": 38.61, "elapsed_time": "17:23:16", "remaining_time": "1 day, 3:39:08"}
|
| 94 |
+
{"current_steps": 940, "total_steps": 2409, "loss": 0.0171, "lr": 1.5306291005949964e-05, "epoch": 1.1691805317990982, "percentage": 39.02, "elapsed_time": "17:34:51", "remaining_time": "1 day, 3:28:29"}
|
| 95 |
+
{"current_steps": 950, "total_steps": 2409, "loss": 0.0159, "lr": 1.518291408338409e-05, "epoch": 1.181620276784326, "percentage": 39.44, "elapsed_time": "17:46:08", "remaining_time": "1 day, 3:17:22"}
|
| 96 |
+
{"current_steps": 960, "total_steps": 2409, "loss": 0.0174, "lr": 1.5058448863397716e-05, "epoch": 1.1940600217695536, "percentage": 39.85, "elapsed_time": "17:57:11", "remaining_time": "1 day, 3:05:53"}
|
| 97 |
+
{"current_steps": 970, "total_steps": 2409, "loss": 0.0154, "lr": 1.4932921480936491e-05, "epoch": 1.2064997667547814, "percentage": 40.27, "elapsed_time": "18:08:34", "remaining_time": "1 day, 2:54:54"}
|
| 98 |
+
{"current_steps": 980, "total_steps": 2409, "loss": 0.0169, "lr": 1.48063582939767e-05, "epoch": 1.2189395117400093, "percentage": 40.68, "elapsed_time": "18:19:57", "remaining_time": "1 day, 2:43:55"}
|
| 99 |
+
{"current_steps": 990, "total_steps": 2409, "loss": 0.0166, "lr": 1.4678785877990699e-05, "epoch": 1.231379256725237, "percentage": 41.1, "elapsed_time": "18:31:12", "remaining_time": "1 day, 2:32:44"}
|
| 100 |
+
{"current_steps": 1000, "total_steps": 2409, "loss": 0.0174, "lr": 1.4550231020366619e-05, "epoch": 1.2438190017104649, "percentage": 41.51, "elapsed_time": "18:42:29", "remaining_time": "1 day, 2:21:35"}
|
| 101 |
+
{"current_steps": 1010, "total_steps": 2409, "loss": 0.0166, "lr": 1.4420720714783635e-05, "epoch": 1.2562587466956927, "percentage": 41.93, "elapsed_time": "18:53:59", "remaining_time": "1 day, 2:10:44"}
|
| 102 |
+
{"current_steps": 1020, "total_steps": 2409, "loss": 0.0175, "lr": 1.4290282155543876e-05, "epoch": 1.2686984916809205, "percentage": 42.34, "elapsed_time": "19:05:17", "remaining_time": "1 day, 1:59:37"}
|
| 103 |
+
{"current_steps": 1030, "total_steps": 2409, "loss": 0.0153, "lr": 1.4158942731862229e-05, "epoch": 1.2811382366661483, "percentage": 42.76, "elapsed_time": "19:16:27", "remaining_time": "1 day, 1:48:18"}
|
| 104 |
+
{"current_steps": 1040, "total_steps": 2409, "loss": 0.016, "lr": 1.40267300221152e-05, "epoch": 1.2935779816513762, "percentage": 43.17, "elapsed_time": "19:27:38", "remaining_time": "1 day, 1:37:01"}
|
| 105 |
+
{"current_steps": 1050, "total_steps": 2409, "loss": 0.0155, "lr": 1.3893671788050073e-05, "epoch": 1.306017726636604, "percentage": 43.59, "elapsed_time": "19:38:43", "remaining_time": "1 day, 1:25:36"}
|
| 106 |
+
{"current_steps": 1060, "total_steps": 2409, "loss": 0.0161, "lr": 1.3759795968955555e-05, "epoch": 1.3184574716218318, "percentage": 44.0, "elapsed_time": "19:49:45", "remaining_time": "1 day, 1:14:07"}
|
| 107 |
+
{"current_steps": 1070, "total_steps": 2409, "loss": 0.0159, "lr": 1.3625130675795135e-05, "epoch": 1.3308972166070596, "percentage": 44.42, "elapsed_time": "20:01:00", "remaining_time": "1 day, 1:02:56"}
|
| 108 |
+
{"current_steps": 1080, "total_steps": 2409, "loss": 0.0166, "lr": 1.3489704185304417e-05, "epoch": 1.3433369615922874, "percentage": 44.83, "elapsed_time": "20:12:14", "remaining_time": "1 day, 0:51:43"}
|
| 109 |
+
{"current_steps": 1090, "total_steps": 2409, "loss": 0.0159, "lr": 1.3353544934053618e-05, "epoch": 1.3557767065775153, "percentage": 45.25, "elapsed_time": "20:23:29", "remaining_time": "1 day, 0:40:32"}
|
| 110 |
+
{"current_steps": 1100, "total_steps": 2409, "loss": 0.0169, "lr": 1.3216681512476522e-05, "epoch": 1.368216451562743, "percentage": 45.66, "elapsed_time": "20:34:38", "remaining_time": "1 day, 0:29:13"}
|
| 111 |
+
{"current_steps": 1110, "total_steps": 2409, "loss": 0.0166, "lr": 1.3079142658867124e-05, "epoch": 1.380656196547971, "percentage": 46.08, "elapsed_time": "20:45:54", "remaining_time": "1 day, 0:18:02"}
|
| 112 |
+
{"current_steps": 1120, "total_steps": 2409, "loss": 0.0154, "lr": 1.2940957253345214e-05, "epoch": 1.3930959415331985, "percentage": 46.49, "elapsed_time": "20:57:11", "remaining_time": "1 day, 0:06:53"}
|
| 113 |
+
{"current_steps": 1130, "total_steps": 2409, "loss": 0.0168, "lr": 1.2802154311792196e-05, "epoch": 1.4055356865184263, "percentage": 46.91, "elapsed_time": "21:08:17", "remaining_time": "23:55:31"}
|
| 114 |
+
{"current_steps": 1140, "total_steps": 2409, "loss": 0.0168, "lr": 1.26627629797584e-05, "epoch": 1.4179754315036541, "percentage": 47.32, "elapsed_time": "21:19:31", "remaining_time": "23:44:18"}
|
| 115 |
+
{"current_steps": 1150, "total_steps": 2409, "loss": 0.0153, "lr": 1.2522812526343149e-05, "epoch": 1.430415176488882, "percentage": 47.74, "elapsed_time": "21:30:41", "remaining_time": "23:33:02"}
|
| 116 |
+
{"current_steps": 1160, "total_steps": 2409, "loss": 0.015, "lr": 1.2382332338048899e-05, "epoch": 1.4428549214741098, "percentage": 48.15, "elapsed_time": "21:41:50", "remaining_time": "23:21:43"}
|
| 117 |
+
{"current_steps": 1170, "total_steps": 2409, "loss": 0.0165, "lr": 1.2241351912610726e-05, "epoch": 1.4552946664593376, "percentage": 48.57, "elapsed_time": "21:53:00", "remaining_time": "23:10:26"}
|
| 118 |
+
{"current_steps": 1180, "total_steps": 2409, "loss": 0.0161, "lr": 1.2099900852802449e-05, "epoch": 1.4677344114445654, "percentage": 48.98, "elapsed_time": "22:04:15", "remaining_time": "22:59:14"}
|
| 119 |
+
{"current_steps": 1190, "total_steps": 2409, "loss": 0.015, "lr": 1.1958008860220711e-05, "epoch": 1.4801741564297932, "percentage": 49.4, "elapsed_time": "22:15:21", "remaining_time": "22:47:54"}
|
| 120 |
+
{"current_steps": 1200, "total_steps": 2409, "loss": 0.0153, "lr": 1.1815705729048283e-05, "epoch": 1.492613901415021, "percentage": 49.81, "elapsed_time": "22:26:27", "remaining_time": "22:36:33"}
|
| 121 |
+
{"current_steps": 1210, "total_steps": 2409, "loss": 0.0157, "lr": 1.1673021339797967e-05, "epoch": 1.5050536464002486, "percentage": 50.23, "elapsed_time": "22:37:41", "remaining_time": "22:25:20"}
|
| 122 |
+
{"current_steps": 1220, "total_steps": 2409, "loss": 0.0161, "lr": 1.1529985653038325e-05, "epoch": 1.5174933913854765, "percentage": 50.64, "elapsed_time": "22:48:58", "remaining_time": "22:14:11"}
|
| 123 |
+
{"current_steps": 1230, "total_steps": 2409, "loss": 0.0155, "lr": 1.1386628703102634e-05, "epoch": 1.5299331363707043, "percentage": 51.06, "elapsed_time": "23:00:01", "remaining_time": "22:02:48"}
|
| 124 |
+
{"current_steps": 1240, "total_steps": 2409, "loss": 0.0149, "lr": 1.1242980591782344e-05, "epoch": 1.542372881355932, "percentage": 51.47, "elapsed_time": "23:11:14", "remaining_time": "21:51:34"}
|
| 125 |
+
{"current_steps": 1250, "total_steps": 2409, "loss": 0.0153, "lr": 1.1099071482006361e-05, "epoch": 1.55481262634116, "percentage": 51.89, "elapsed_time": "23:22:04", "remaining_time": "21:39:59"}
|
| 126 |
+
{"current_steps": 1260, "total_steps": 2409, "loss": 0.0148, "lr": 1.0954931591507489e-05, "epoch": 1.5672523713263877, "percentage": 52.3, "elapsed_time": "23:33:06", "remaining_time": "21:28:36"}
|
| 127 |
+
{"current_steps": 1270, "total_steps": 2409, "loss": 0.0145, "lr": 1.0810591186477402e-05, "epoch": 1.5796921163116155, "percentage": 52.72, "elapsed_time": "23:44:17", "remaining_time": "21:17:22"}
|
| 128 |
+
{"current_steps": 1280, "total_steps": 2409, "loss": 0.015, "lr": 1.0666080575211373e-05, "epoch": 1.5921318612968434, "percentage": 53.13, "elapsed_time": "23:55:35", "remaining_time": "21:06:13"}
|
| 129 |
+
{"current_steps": 1290, "total_steps": 2409, "loss": 0.0143, "lr": 1.0521430101744238e-05, "epoch": 1.6045716062820712, "percentage": 53.55, "elapsed_time": "1 day, 0:06:42", "remaining_time": "20:54:56"}
|
| 130 |
+
{"current_steps": 1300, "total_steps": 2409, "loss": 0.0145, "lr": 1.03766701394788e-05, "epoch": 1.617011351267299, "percentage": 53.96, "elapsed_time": "1 day, 0:17:47", "remaining_time": "20:43:36"}
|
| 131 |
+
{"current_steps": 1310, "total_steps": 2409, "loss": 0.0153, "lr": 1.023183108480809e-05, "epoch": 1.6294510962525268, "percentage": 54.38, "elapsed_time": "1 day, 0:28:59", "remaining_time": "20:32:22"}
|
| 132 |
+
{"current_steps": 1320, "total_steps": 2409, "loss": 0.015, "lr": 1.0086943350732797e-05, "epoch": 1.6418908412377546, "percentage": 54.79, "elapsed_time": "1 day, 0:40:09", "remaining_time": "20:21:07"}
|
| 133 |
+
{"current_steps": 1330, "total_steps": 2409, "loss": 0.0154, "lr": 9.942037360475205e-06, "epoch": 1.6543305862229825, "percentage": 55.21, "elapsed_time": "1 day, 0:51:17", "remaining_time": "20:09:51"}
|
| 134 |
+
{"current_steps": 1340, "total_steps": 2409, "loss": 0.0145, "lr": 9.797143541091e-06, "epoch": 1.6667703312082103, "percentage": 55.62, "elapsed_time": "1 day, 1:02:25", "remaining_time": "19:58:34"}
|
| 135 |
+
{"current_steps": 1350, "total_steps": 2409, "loss": 0.0151, "lr": 9.65229231708025e-06, "epoch": 1.679210076193438, "percentage": 56.04, "elapsed_time": "1 day, 1:13:37", "remaining_time": "19:47:21"}
|
| 136 |
+
{"current_steps": 1360, "total_steps": 2409, "loss": 0.0155, "lr": 9.507514103998936e-06, "epoch": 1.691649821178666, "percentage": 56.45, "elapsed_time": "1 day, 1:24:58", "remaining_time": "19:36:15"}
|
| 137 |
+
{"current_steps": 1370, "total_steps": 2409, "loss": 0.014, "lr": 9.362839302072354e-06, "epoch": 1.7040895661638937, "percentage": 56.87, "elapsed_time": "1 day, 1:36:03", "remaining_time": "19:24:55"}
|
| 138 |
+
{"current_steps": 1380, "total_steps": 2409, "loss": 0.0144, "lr": 9.218298289811748e-06, "epoch": 1.7165293111491216, "percentage": 57.29, "elapsed_time": "1 day, 1:47:12", "remaining_time": "19:13:40"}
|
| 139 |
+
{"current_steps": 1390, "total_steps": 2409, "loss": 0.0142, "lr": 9.073921417635485e-06, "epoch": 1.7289690561343494, "percentage": 57.7, "elapsed_time": "1 day, 1:58:07", "remaining_time": "19:02:14"}
|
| 140 |
+
{"current_steps": 1400, "total_steps": 2409, "loss": 0.0149, "lr": 8.92973900149615e-06, "epoch": 1.7414088011195772, "percentage": 58.12, "elapsed_time": "1 day, 2:09:14", "remaining_time": "18:50:58"}
|
| 141 |
+
{"current_steps": 1410, "total_steps": 2409, "loss": 0.0149, "lr": 8.785781316514841e-06, "epoch": 1.753848546104805, "percentage": 58.53, "elapsed_time": "1 day, 2:20:19", "remaining_time": "18:39:40"}
|
| 142 |
+
{"current_steps": 1420, "total_steps": 2409, "loss": 0.0157, "lr": 8.642078590624097e-06, "epoch": 1.7662882910900326, "percentage": 58.95, "elapsed_time": "1 day, 2:31:30", "remaining_time": "18:28:27"}
|
| 143 |
+
{"current_steps": 1430, "total_steps": 2409, "loss": 0.0148, "lr": 8.498660998220669e-06, "epoch": 1.7787280360752604, "percentage": 59.36, "elapsed_time": "1 day, 2:42:26", "remaining_time": "18:17:03"}
|
| 144 |
+
{"current_steps": 1440, "total_steps": 2409, "loss": 0.0148, "lr": 8.355558653829586e-06, "epoch": 1.7911677810604882, "percentage": 59.78, "elapsed_time": "1 day, 2:53:30", "remaining_time": "18:05:45"}
|
| 145 |
+
{"current_steps": 1450, "total_steps": 2409, "loss": 0.0137, "lr": 8.212801605780754e-06, "epoch": 1.803607526045716, "percentage": 60.19, "elapsed_time": "1 day, 3:04:48", "remaining_time": "17:54:36"}
|
| 146 |
+
{"current_steps": 1460, "total_steps": 2409, "loss": 0.0136, "lr": 8.07041982989948e-06, "epoch": 1.8160472710309439, "percentage": 60.61, "elapsed_time": "1 day, 3:16:04", "remaining_time": "17:43:26"}
|
| 147 |
+
{"current_steps": 1470, "total_steps": 2409, "loss": 0.0131, "lr": 7.928443223212216e-06, "epoch": 1.8284870160161717, "percentage": 61.02, "elapsed_time": "1 day, 3:27:14", "remaining_time": "17:32:12"}
|
| 148 |
+
{"current_steps": 1480, "total_steps": 2409, "loss": 0.0139, "lr": 7.786901597668824e-06, "epoch": 1.8409267610013995, "percentage": 61.44, "elapsed_time": "1 day, 3:38:25", "remaining_time": "17:21:00"}
|
| 149 |
+
{"current_steps": 1490, "total_steps": 2409, "loss": 0.0128, "lr": 7.64582467388275e-06, "epoch": 1.853366505986627, "percentage": 61.85, "elapsed_time": "1 day, 3:49:46", "remaining_time": "17:09:52"}
|
| 150 |
+
{"current_steps": 1500, "total_steps": 2409, "loss": 0.0149, "lr": 7.505242074890333e-06, "epoch": 1.865806250971855, "percentage": 62.27, "elapsed_time": "1 day, 4:01:01", "remaining_time": "16:58:42"}
|
| 151 |
+
{"current_steps": 1510, "total_steps": 2409, "loss": 0.0141, "lr": 7.3651833199306355e-06, "epoch": 1.8782459959570827, "percentage": 62.68, "elapsed_time": "1 day, 4:12:39", "remaining_time": "16:47:44"}
|
| 152 |
+
{"current_steps": 1520, "total_steps": 2409, "loss": 0.0143, "lr": 7.225677818247041e-06, "epoch": 1.8906857409423106, "percentage": 63.1, "elapsed_time": "1 day, 4:23:50", "remaining_time": "16:36:31"}
|
| 153 |
+
{"current_steps": 1530, "total_steps": 2409, "loss": 0.0147, "lr": 7.086754862911982e-06, "epoch": 1.9031254859275384, "percentage": 63.51, "elapsed_time": "1 day, 4:35:14", "remaining_time": "16:25:25"}
|
| 154 |
+
{"current_steps": 1540, "total_steps": 2409, "loss": 0.0141, "lr": 6.948443624676017e-06, "epoch": 1.9155652309127662, "percentage": 63.93, "elapsed_time": "1 day, 4:46:17", "remaining_time": "16:14:07"}
|
| 155 |
+
{"current_steps": 1550, "total_steps": 2409, "loss": 0.0145, "lr": 6.810773145842653e-06, "epoch": 1.928004975897994, "percentage": 64.34, "elapsed_time": "1 day, 4:57:30", "remaining_time": "16:02:55"}
|
| 156 |
+
{"current_steps": 1560, "total_steps": 2409, "loss": 0.0138, "lr": 6.673772334170063e-06, "epoch": 1.9404447208832218, "percentage": 64.76, "elapsed_time": "1 day, 5:08:34", "remaining_time": "15:51:37"}
|
| 157 |
+
{"current_steps": 1570, "total_steps": 2409, "loss": 0.0142, "lr": 6.537469956801128e-06, "epoch": 1.9528844658684497, "percentage": 65.17, "elapsed_time": "1 day, 5:19:46", "remaining_time": "15:40:24"}
|
| 158 |
+
{"current_steps": 1580, "total_steps": 2409, "loss": 0.0126, "lr": 6.4018946342229495e-06, "epoch": 1.9653242108536775, "percentage": 65.59, "elapsed_time": "1 day, 5:30:46", "remaining_time": "15:29:05"}
|
| 159 |
+
{"current_steps": 1590, "total_steps": 2409, "loss": 0.0132, "lr": 6.267074834257199e-06, "epoch": 1.9777639558389053, "percentage": 66.0, "elapsed_time": "1 day, 5:41:53", "remaining_time": "15:17:50"}
|
| 160 |
+
{"current_steps": 1600, "total_steps": 2409, "loss": 0.0139, "lr": 6.1330388660824906e-06, "epoch": 1.9902037008241331, "percentage": 66.42, "elapsed_time": "1 day, 5:53:14", "remaining_time": "15:06:42"}
|
| 161 |
+
{"current_steps": 1610, "total_steps": 2409, "loss": 0.013, "lr": 5.999814874290084e-06, "epoch": 2.0024879489970457, "percentage": 66.83, "elapsed_time": "1 day, 6:04:22", "remaining_time": "14:55:28"}
|
| 162 |
+
{"current_steps": 1620, "total_steps": 2409, "loss": 0.0113, "lr": 5.867430832974145e-06, "epoch": 2.0149276939822736, "percentage": 67.25, "elapsed_time": "1 day, 6:15:33", "remaining_time": "14:44:14"}
|
| 163 |
+
{"current_steps": 1630, "total_steps": 2409, "loss": 0.0122, "lr": 5.735914539857799e-06, "epoch": 2.0273674389675014, "percentage": 67.66, "elapsed_time": "1 day, 6:26:34", "remaining_time": "14:32:56"}
|
| 164 |
+
{"current_steps": 1640, "total_steps": 2409, "loss": 0.0125, "lr": 5.60529361045623e-06, "epoch": 2.0398071839527288, "percentage": 68.08, "elapsed_time": "1 day, 6:37:47", "remaining_time": "14:21:44"}
|
| 165 |
+
{"current_steps": 1650, "total_steps": 2409, "loss": 0.0116, "lr": 5.4755954722780236e-06, "epoch": 2.0522469289379566, "percentage": 68.49, "elapsed_time": "1 day, 6:49:08", "remaining_time": "14:10:36"}
|
| 166 |
+
{"current_steps": 1660, "total_steps": 2409, "loss": 0.0119, "lr": 5.346847359066006e-06, "epoch": 2.0646866739231844, "percentage": 68.91, "elapsed_time": "1 day, 7:00:17", "remaining_time": "13:59:22"}
|
| 167 |
+
{"current_steps": 1670, "total_steps": 2409, "loss": 0.0118, "lr": 5.21907630507875e-06, "epoch": 2.077126418908412, "percentage": 69.32, "elapsed_time": "1 day, 7:11:27", "remaining_time": "13:48:09"}
|
| 168 |
+
{"current_steps": 1680, "total_steps": 2409, "loss": 0.0116, "lr": 5.092309139413982e-06, "epoch": 2.08956616389364, "percentage": 69.74, "elapsed_time": "1 day, 7:22:38", "remaining_time": "13:36:56"}
|
| 169 |
+
{"current_steps": 1690, "total_steps": 2409, "loss": 0.0117, "lr": 4.966572480375076e-06, "epoch": 2.102005908878868, "percentage": 70.15, "elapsed_time": "1 day, 7:33:49", "remaining_time": "13:25:42"}
|
| 170 |
+
{"current_steps": 1700, "total_steps": 2409, "loss": 0.0112, "lr": 4.84189272988177e-06, "epoch": 2.1144456538640957, "percentage": 70.57, "elapsed_time": "1 day, 7:45:01", "remaining_time": "13:14:30"}
|
| 171 |
+
{"current_steps": 1710, "total_steps": 2409, "loss": 0.0118, "lr": 4.71829606792639e-06, "epoch": 2.1268853988493235, "percentage": 70.98, "elapsed_time": "1 day, 7:56:15", "remaining_time": "13:03:18"}
|
| 172 |
+
{"current_steps": 1720, "total_steps": 2409, "loss": 0.0122, "lr": 4.595808447076578e-06, "epoch": 2.1393251438345513, "percentage": 71.4, "elapsed_time": "1 day, 8:07:18", "remaining_time": "12:52:02"}
|
| 173 |
+
{"current_steps": 1730, "total_steps": 2409, "loss": 0.0113, "lr": 4.47445558702587e-06, "epoch": 2.151764888819779, "percentage": 71.81, "elapsed_time": "1 day, 8:18:21", "remaining_time": "12:40:46"}
|
| 174 |
+
{"current_steps": 1740, "total_steps": 2409, "loss": 0.0119, "lr": 4.354262969193097e-06, "epoch": 2.164204633805007, "percentage": 72.23, "elapsed_time": "1 day, 8:29:30", "remaining_time": "12:29:33"}
|
| 175 |
+
{"current_steps": 1750, "total_steps": 2409, "loss": 0.0115, "lr": 4.235255831371879e-06, "epoch": 2.1766443787902348, "percentage": 72.64, "elapsed_time": "1 day, 8:40:36", "remaining_time": "12:18:18"}
|
| 176 |
+
{"current_steps": 1760, "total_steps": 2409, "loss": 0.0135, "lr": 4.1174591624312235e-06, "epoch": 2.1890841237754626, "percentage": 73.06, "elapsed_time": "1 day, 8:51:50", "remaining_time": "12:07:07"}
|
| 177 |
+
{"current_steps": 1770, "total_steps": 2409, "loss": 0.0111, "lr": 4.000897697068418e-06, "epoch": 2.2015238687606904, "percentage": 73.47, "elapsed_time": "1 day, 9:03:13", "remaining_time": "11:55:58"}
|
| 178 |
+
{"current_steps": 1780, "total_steps": 2409, "loss": 0.0112, "lr": 3.88559591061531e-06, "epoch": 2.213963613745918, "percentage": 73.89, "elapsed_time": "1 day, 9:14:26", "remaining_time": "11:44:46"}
|
| 179 |
+
{"current_steps": 1790, "total_steps": 2409, "loss": 0.0115, "lr": 3.7715780138989965e-06, "epoch": 2.226403358731146, "percentage": 74.3, "elapsed_time": "1 day, 9:25:36", "remaining_time": "11:33:33"}
|
| 180 |
+
{"current_steps": 1800, "total_steps": 2409, "loss": 0.0104, "lr": 3.658867948158119e-06, "epoch": 2.238843103716374, "percentage": 74.72, "elapsed_time": "1 day, 9:36:48", "remaining_time": "11:22:21"}
|
| 181 |
+
{"current_steps": 1810, "total_steps": 2409, "loss": 0.0111, "lr": 3.5474893800157005e-06, "epoch": 2.2512828487016017, "percentage": 75.13, "elapsed_time": "1 day, 9:48:09", "remaining_time": "11:11:11"}
|
| 182 |
+
{"current_steps": 1820, "total_steps": 2409, "loss": 0.0119, "lr": 3.4374656965097086e-06, "epoch": 2.2637225936868295, "percentage": 75.55, "elapsed_time": "1 day, 9:59:31", "remaining_time": "11:00:02"}
|
| 183 |
+
{"current_steps": 1830, "total_steps": 2409, "loss": 0.0112, "lr": 3.3288200001822624e-06, "epoch": 2.2761623386720573, "percentage": 75.97, "elapsed_time": "1 day, 10:10:36", "remaining_time": "10:48:47"}
|
| 184 |
+
{"current_steps": 1840, "total_steps": 2409, "loss": 0.0127, "lr": 3.22157510422864e-06, "epoch": 2.288602083657285, "percentage": 76.38, "elapsed_time": "1 day, 10:21:45", "remaining_time": "10:37:34"}
|
| 185 |
+
{"current_steps": 1850, "total_steps": 2409, "loss": 0.0121, "lr": 3.115753527706986e-06, "epoch": 2.301041828642513, "percentage": 76.8, "elapsed_time": "1 day, 10:33:14", "remaining_time": "10:26:27"}
|
| 186 |
+
{"current_steps": 1860, "total_steps": 2409, "loss": 0.0118, "lr": 3.0113774908098257e-06, "epoch": 2.3134815736277408, "percentage": 77.21, "elapsed_time": "1 day, 10:44:30", "remaining_time": "10:15:16"}
|
| 187 |
+
{"current_steps": 1870, "total_steps": 2409, "loss": 0.0117, "lr": 2.9084689101983076e-06, "epoch": 2.3259213186129686, "percentage": 77.63, "elapsed_time": "1 day, 10:55:47", "remaining_time": "10:04:04"}
|
| 188 |
+
{"current_steps": 1880, "total_steps": 2409, "loss": 0.0113, "lr": 2.8070493944001843e-06, "epoch": 2.3383610635981964, "percentage": 78.04, "elapsed_time": "1 day, 11:07:04", "remaining_time": "9:52:53"}
|
| 189 |
+
{"current_steps": 1890, "total_steps": 2409, "loss": 0.0114, "lr": 2.70714023927251e-06, "epoch": 2.350800808583424, "percentage": 78.46, "elapsed_time": "1 day, 11:18:21", "remaining_time": "9:41:42"}
|
| 190 |
+
{"current_steps": 1900, "total_steps": 2409, "loss": 0.0133, "lr": 2.608762423529969e-06, "epoch": 2.363240553568652, "percentage": 78.87, "elapsed_time": "1 day, 11:29:41", "remaining_time": "9:30:32"}
|
| 191 |
+
{"current_steps": 1910, "total_steps": 2409, "loss": 0.0106, "lr": 2.5119366043398265e-06, "epoch": 2.3756802985538794, "percentage": 79.29, "elapsed_time": "1 day, 11:40:59", "remaining_time": "9:19:20"}
|
| 192 |
+
{"current_steps": 1920, "total_steps": 2409, "loss": 0.0115, "lr": 2.416683112984355e-06, "epoch": 2.3881200435391072, "percentage": 79.7, "elapsed_time": "1 day, 11:52:04", "remaining_time": "9:08:06"}
|
| 193 |
+
{"current_steps": 1930, "total_steps": 2409, "loss": 0.0102, "lr": 2.323021950591743e-06, "epoch": 2.400559788524335, "percentage": 80.12, "elapsed_time": "1 day, 12:03:19", "remaining_time": "8:56:54"}
|
| 194 |
+
{"current_steps": 1940, "total_steps": 2409, "loss": 0.0114, "lr": 2.2309727839362716e-06, "epoch": 2.412999533509563, "percentage": 80.53, "elapsed_time": "1 day, 12:14:40", "remaining_time": "8:45:43"}
|
| 195 |
+
{"current_steps": 1950, "total_steps": 2409, "loss": 0.0115, "lr": 2.1405549413087543e-06, "epoch": 2.4254392784947907, "percentage": 80.95, "elapsed_time": "1 day, 12:26:01", "remaining_time": "8:34:33"}
|
| 196 |
+
{"current_steps": 1960, "total_steps": 2409, "loss": 0.0105, "lr": 2.051787408457997e-06, "epoch": 2.4378790234800185, "percentage": 81.36, "elapsed_time": "1 day, 12:37:09", "remaining_time": "8:23:19"}
|
| 197 |
+
{"current_steps": 1970, "total_steps": 2409, "loss": 0.0112, "lr": 1.964688824604234e-06, "epoch": 2.4503187684652463, "percentage": 81.78, "elapsed_time": "1 day, 12:48:19", "remaining_time": "8:12:06"}
|
| 198 |
+
{"current_steps": 1980, "total_steps": 2409, "loss": 0.0122, "lr": 1.8792774785252798e-06, "epoch": 2.462758513450474, "percentage": 82.19, "elapsed_time": "1 day, 12:59:29", "remaining_time": "8:00:53"}
|
| 199 |
+
{"current_steps": 1990, "total_steps": 2409, "loss": 0.0104, "lr": 1.795571304716316e-06, "epoch": 2.475198258435702, "percentage": 82.61, "elapsed_time": "1 day, 13:10:48", "remaining_time": "7:49:42"}
|
| 200 |
+
{"current_steps": 2000, "total_steps": 2409, "loss": 0.0114, "lr": 1.713587879624019e-06, "epoch": 2.4876380034209298, "percentage": 83.02, "elapsed_time": "1 day, 13:22:04", "remaining_time": "7:38:30"}
|
| 201 |
+
{"current_steps": 2010, "total_steps": 2409, "loss": 0.0109, "lr": 1.6333444179559078e-06, "epoch": 2.5000777484061576, "percentage": 83.44, "elapsed_time": "1 day, 13:33:38", "remaining_time": "7:27:21"}
|
| 202 |
+
{"current_steps": 2020, "total_steps": 2409, "loss": 0.0107, "lr": 1.5548577690656386e-06, "epoch": 2.5125174933913854, "percentage": 83.85, "elapsed_time": "1 day, 13:44:42", "remaining_time": "7:16:07"}
|
| 203 |
+
{"current_steps": 2030, "total_steps": 2409, "loss": 0.0103, "lr": 1.4781444134150048e-06, "epoch": 2.5249572383766132, "percentage": 84.27, "elapsed_time": "1 day, 13:55:50", "remaining_time": "7:04:53"}
|
| 204 |
+
{"current_steps": 2040, "total_steps": 2409, "loss": 0.0115, "lr": 1.4032204591134092e-06, "epoch": 2.537396983361841, "percentage": 84.68, "elapsed_time": "1 day, 14:06:46", "remaining_time": "6:53:38"}
|
| 205 |
+
{"current_steps": 2050, "total_steps": 2409, "loss": 0.0109, "lr": 1.3301016385355093e-06, "epoch": 2.549836728347069, "percentage": 85.1, "elapsed_time": "1 day, 14:18:04", "remaining_time": "6:42:26"}
|
| 206 |
+
{"current_steps": 2060, "total_steps": 2409, "loss": 0.0118, "lr": 1.2588033050177828e-06, "epoch": 2.5622764733322967, "percentage": 85.51, "elapsed_time": "1 day, 14:29:17", "remaining_time": "6:31:13"}
|
| 207 |
+
{"current_steps": 2070, "total_steps": 2409, "loss": 0.0101, "lr": 1.1893404296346422e-06, "epoch": 2.5747162183175245, "percentage": 85.93, "elapsed_time": "1 day, 14:40:25", "remaining_time": "6:20:00"}
|
| 208 |
+
{"current_steps": 2080, "total_steps": 2409, "loss": 0.0109, "lr": 1.121727598054867e-06, "epoch": 2.5871559633027523, "percentage": 86.34, "elapsed_time": "1 day, 14:51:41", "remaining_time": "6:08:48"}
|
| 209 |
+
{"current_steps": 2090, "total_steps": 2409, "loss": 0.0102, "lr": 1.0559790074789134e-06, "epoch": 2.59959570828798, "percentage": 86.76, "elapsed_time": "1 day, 15:02:52", "remaining_time": "5:57:35"}
|
| 210 |
+
{"current_steps": 2100, "total_steps": 2409, "loss": 0.0105, "lr": 9.921084636578237e-07, "epoch": 2.612035453273208, "percentage": 87.17, "elapsed_time": "1 day, 15:14:17", "remaining_time": "5:46:25"}
|
| 211 |
+
{"current_steps": 2110, "total_steps": 2409, "loss": 0.0107, "lr": 9.301293779943321e-07, "epoch": 2.624475198258436, "percentage": 87.59, "elapsed_time": "1 day, 15:25:40", "remaining_time": "5:35:13"}
|
| 212 |
+
{"current_steps": 2120, "total_steps": 2409, "loss": 0.0127, "lr": 8.700547647267521e-07, "epoch": 2.6369149432436636, "percentage": 88.0, "elapsed_time": "1 day, 15:37:03", "remaining_time": "5:24:02"}
|
| 213 |
+
{"current_steps": 2130, "total_steps": 2409, "loss": 0.0113, "lr": 8.118972381962853e-07, "epoch": 2.6493546882288914, "percentage": 88.42, "elapsed_time": "1 day, 15:47:55", "remaining_time": "5:12:47"}
|
| 214 |
+
{"current_steps": 2140, "total_steps": 2409, "loss": 0.0109, "lr": 7.5566901019828e-07, "epoch": 2.6617944332141192, "percentage": 88.83, "elapsed_time": "1 day, 15:59:11", "remaining_time": "5:01:34"}
|
| 215 |
+
{"current_steps": 2150, "total_steps": 2409, "loss": 0.0109, "lr": 7.013818874180323e-07, "epoch": 2.674234178199347, "percentage": 89.25, "elapsed_time": "1 day, 16:10:19", "remaining_time": "4:50:21"}
|
| 216 |
+
{"current_steps": 2160, "total_steps": 2409, "loss": 0.0121, "lr": 6.490472689516358e-07, "epoch": 2.686673923184575, "percentage": 89.66, "elapsed_time": "1 day, 16:21:26", "remaining_time": "4:39:08"}
|
| 217 |
+
{"current_steps": 2170, "total_steps": 2409, "loss": 0.011, "lr": 5.986761439124289e-07, "epoch": 2.6991136681698027, "percentage": 90.08, "elapsed_time": "1 day, 16:32:35", "remaining_time": "4:27:55"}
|
| 218 |
+
{"current_steps": 2180, "total_steps": 2409, "loss": 0.0104, "lr": 5.502790891235188e-07, "epoch": 2.7115534131550305, "percentage": 90.49, "elapsed_time": "1 day, 16:43:54", "remaining_time": "4:16:43"}
|
| 219 |
+
{"current_steps": 2190, "total_steps": 2409, "loss": 0.0118, "lr": 5.038662668968885e-07, "epoch": 2.7239931581402583, "percentage": 90.91, "elapsed_time": "1 day, 16:54:57", "remaining_time": "4:05:29"}
|
| 220 |
+
{"current_steps": 2200, "total_steps": 2409, "loss": 0.0105, "lr": 4.594474228995305e-07, "epoch": 2.736432903125486, "percentage": 91.32, "elapsed_time": "1 day, 17:06:05", "remaining_time": "3:54:16"}
|
| 221 |
+
{"current_steps": 2210, "total_steps": 2409, "loss": 0.0112, "lr": 4.1703188410707087e-07, "epoch": 2.748872648110714, "percentage": 91.74, "elapsed_time": "1 day, 17:17:13", "remaining_time": "3:43:03"}
|
| 222 |
+
{"current_steps": 2220, "total_steps": 2409, "loss": 0.0103, "lr": 3.7662855684532094e-07, "epoch": 2.761312393095942, "percentage": 92.15, "elapsed_time": "1 day, 17:28:17", "remaining_time": "3:31:50"}
|
| 223 |
+
{"current_steps": 2230, "total_steps": 2409, "loss": 0.0116, "lr": 3.3824592492013087e-07, "epoch": 2.7737521380811696, "percentage": 92.57, "elapsed_time": "1 day, 17:39:39", "remaining_time": "3:20:38"}
|
| 224 |
+
{"current_steps": 2240, "total_steps": 2409, "loss": 0.0108, "lr": 3.018920478359921e-07, "epoch": 2.786191883066397, "percentage": 92.98, "elapsed_time": "1 day, 17:50:50", "remaining_time": "3:09:26"}
|
| 225 |
+
{"current_steps": 2250, "total_steps": 2409, "loss": 0.0103, "lr": 2.6757455910370486e-07, "epoch": 2.798631628051625, "percentage": 93.4, "elapsed_time": "1 day, 18:01:42", "remaining_time": "2:58:12"}
|
| 226 |
+
{"current_steps": 2260, "total_steps": 2409, "loss": 0.0114, "lr": 2.3530066463752665e-07, "epoch": 2.8110713730368526, "percentage": 93.81, "elapsed_time": "1 day, 18:12:58", "remaining_time": "2:46:59"}
|
| 227 |
+
{"current_steps": 2270, "total_steps": 2409, "loss": 0.0111, "lr": 2.0507714124207157e-07, "epoch": 2.8235111180220804, "percentage": 94.23, "elapsed_time": "1 day, 18:24:03", "remaining_time": "2:35:46"}
|
| 228 |
+
{"current_steps": 2280, "total_steps": 2409, "loss": 0.0108, "lr": 1.7691033518934864e-07, "epoch": 2.8359508630073083, "percentage": 94.65, "elapsed_time": "1 day, 18:35:20", "remaining_time": "2:24:34"}
|
| 229 |
+
{"current_steps": 2290, "total_steps": 2409, "loss": 0.0107, "lr": 1.5080616088616884e-07, "epoch": 2.848390607992536, "percentage": 95.06, "elapsed_time": "1 day, 18:46:30", "remaining_time": "2:13:22"}
|
| 230 |
+
{"current_steps": 2300, "total_steps": 2409, "loss": 0.0105, "lr": 1.2677009963226073e-07, "epoch": 2.860830352977764, "percentage": 95.48, "elapsed_time": "1 day, 18:57:39", "remaining_time": "2:02:09"}
|
| 231 |
+
{"current_steps": 2310, "total_steps": 2409, "loss": 0.0114, "lr": 1.0480719846931775e-07, "epoch": 2.8732700979629917, "percentage": 95.89, "elapsed_time": "1 day, 19:08:45", "remaining_time": "1:50:56"}
|
| 232 |
+
{"current_steps": 2320, "total_steps": 2409, "loss": 0.0113, "lr": 8.492206912122492e-08, "epoch": 2.8857098429482195, "percentage": 96.31, "elapsed_time": "1 day, 19:20:01", "remaining_time": "1:39:44"}
|
| 233 |
+
{"current_steps": 2330, "total_steps": 2409, "loss": 0.01, "lr": 6.711888702570556e-08, "epoch": 2.8981495879334473, "percentage": 96.72, "elapsed_time": "1 day, 19:31:06", "remaining_time": "1:28:31"}
|
| 234 |
+
{"current_steps": 2340, "total_steps": 2409, "loss": 0.0114, "lr": 5.140139045756609e-08, "epoch": 2.910589332918675, "percentage": 97.14, "elapsed_time": "1 day, 19:42:21", "remaining_time": "1:17:19"}
|
| 235 |
+
{"current_steps": 2350, "total_steps": 2409, "loss": 0.0105, "lr": 3.7772879743749324e-08, "epoch": 2.923029077903903, "percentage": 97.55, "elapsed_time": "1 day, 19:53:33", "remaining_time": "1:06:07"}
|
| 236 |
+
{"current_steps": 2360, "total_steps": 2409, "loss": 0.0113, "lr": 2.62362165703256e-08, "epoch": 2.935468822889131, "percentage": 97.97, "elapsed_time": "1 day, 20:04:39", "remaining_time": "0:54:54"}
|
| 237 |
+
{"current_steps": 2370, "total_steps": 2409, "loss": 0.0107, "lr": 1.6793823381614506e-08, "epoch": 2.9479085678743586, "percentage": 98.38, "elapsed_time": "1 day, 20:15:47", "remaining_time": "0:43:42"}
|
| 238 |
+
{"current_steps": 2380, "total_steps": 2409, "loss": 0.0112, "lr": 9.447682871519537e-09, "epoch": 2.9603483128595864, "percentage": 98.8, "elapsed_time": "1 day, 20:26:50", "remaining_time": "0:32:29"}
|
| 239 |
+
{"current_steps": 2390, "total_steps": 2409, "loss": 0.0112, "lr": 4.199337567203365e-09, "epoch": 2.9727880578448143, "percentage": 99.21, "elapsed_time": "1 day, 20:38:09", "remaining_time": "0:21:17"}
|
| 240 |
+
{"current_steps": 2400, "total_steps": 2409, "loss": 0.0119, "lr": 1.0498895051991309e-09, "epoch": 2.985227802830042, "percentage": 99.63, "elapsed_time": "1 day, 20:49:31", "remaining_time": "0:10:05"}
|
| 241 |
+
{"current_steps": 2409, "total_steps": 2409, "epoch": 2.996423573316747, "percentage": 100.0, "elapsed_time": "1 day, 20:59:57", "remaining_time": "0:00:00"}
|
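Each line of trainer_log.jsonl above is a self-contained JSON object with the step counter, smoothed training loss, learning rate, epoch and timing fields; the logged loss drops from roughly 0.64 at step 10 to about 0.012 by the end of training. A small sketch for extracting the loss curve, assuming the log file sits in the working directory (the matplotlib part is optional):

```python
# Sketch: read the JSONL training log and extract (step, loss) pairs.
# Assumes trainer_log.jsonl is in the current directory.
import json
from pathlib import Path

records = [json.loads(line) for line in Path("trainer_log.jsonl").read_text().splitlines() if line.strip()]
steps = [r["current_steps"] for r in records if "loss" in r]   # final summary entry has no loss
losses = [r["loss"] for r in records if "loss" in r]

print(f"{len(records)} log entries, final logged loss: {losses[-1]}")

# Optional quick plot (requires matplotlib):
# import matplotlib.pyplot as plt
# plt.plot(steps, losses); plt.xlabel("step"); plt.ylabel("train loss"); plt.show()
```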
training_args.bin
ADDED
|
@@ -0,0 +1,3 @@
|
version https://git-lfs.github.com/spec/v1
oid sha256:effa1646d85eccc09b6d1260a4b37ed92e54e19488d4139baaba0bd450e33b30
size 7864
|
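training_args.bin (stored above as a Git LFS pointer, 7864 bytes) is the pickled TrainingArguments object that the Hugging Face Trainer saves next to the weights. A hedged sketch for inspecting it is below; on recent PyTorch releases torch.load defaults to weights_only=True, so unpickling this file requires opting out explicitly and should only be done for files you trust.

```python
# Sketch: inspect the pickled TrainingArguments saved by the Trainer.
# weights_only=False is needed on newer PyTorch (e.g. 2.6) because this file is
# a full pickle, not a tensor checkpoint -- only load files from trusted sources.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(type(args).__name__)  # e.g. TrainingArguments (or a subclass)
print(args.learning_rate, args.num_train_epochs, args.per_device_train_batch_size)
```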
vocab.json
ADDED
|
The diff for this file is too large to render.
See raw diff
|