{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 6.0,
  "eval_steps": 500,
  "global_step": 94368,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 0.0,
      "loss": 8.4152,
      "step": 1
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.950000000000001e-06,
      "loss": 3.4041,
      "step": 500
    },
    {
      "epoch": 0.06,
      "learning_rate": 9.950000000000001e-06,
      "loss": 1.6651,
      "step": 1000
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.4950000000000001e-05,
      "loss": 1.5062,
      "step": 1500
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.995e-05,
      "loss": 1.4128,
      "step": 2000
    },
    {
      "epoch": 0.16,
      "learning_rate": 2.495e-05,
      "loss": 1.347,
      "step": 2500
    },
    {
      "epoch": 0.19,
      "learning_rate": 2.995e-05,
      "loss": 1.2902,
      "step": 3000
    },
    {
      "epoch": 0.22,
      "learning_rate": 3.495e-05,
      "loss": 1.2457,
      "step": 3500
    },
    {
      "epoch": 0.25,
      "learning_rate": 3.995e-05,
      "loss": 1.2037,
      "step": 4000
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.495e-05,
      "loss": 1.1728,
      "step": 4500
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.995e-05,
      "loss": 1.1493,
      "step": 5000
    },
    {
      "epoch": 0.35,
      "learning_rate": 5.495e-05,
      "loss": 1.1211,
      "step": 5500
    },
    {
      "epoch": 0.38,
      "learning_rate": 5.995000000000001e-05,
      "loss": 1.0958,
      "step": 6000
    },
    {
      "epoch": 0.41,
      "learning_rate": 6.494999999999999e-05,
      "loss": 1.0736,
      "step": 6500
    },
    {
      "epoch": 0.45,
      "learning_rate": 6.995e-05,
      "loss": 1.0591,
      "step": 7000
    },
    {
      "epoch": 0.48,
      "learning_rate": 7.494e-05,
      "loss": 1.0399,
      "step": 7500
    },
    {
      "epoch": 0.51,
      "learning_rate": 7.994000000000001e-05,
      "loss": 1.0244,
      "step": 8000
    },
    {
      "epoch": 0.54,
      "learning_rate": 8.494000000000001e-05,
      "loss": 1.0128,
      "step": 8500
    },
    {
      "epoch": 0.57,
      "learning_rate": 8.994e-05,
      "loss": 1.0026,
      "step": 9000
    },
    {
      "epoch": 0.6,
      "learning_rate": 9.494000000000001e-05,
      "loss": 0.9945,
      "step": 9500
    },
    {
      "epoch": 0.64,
      "learning_rate": 9.994e-05,
      "loss": 0.9798,
      "step": 10000
    },
    {
      "epoch": 0.67,
      "learning_rate": 9.941446994120994e-05,
      "loss": 0.9709,
      "step": 10500
    },
    {
      "epoch": 0.7,
      "learning_rate": 9.882182818130097e-05,
      "loss": 0.9623,
      "step": 11000
    },
    {
      "epoch": 0.73,
      "learning_rate": 9.8229186421392e-05,
      "loss": 0.9505,
      "step": 11500
    },
    {
      "epoch": 0.76,
      "learning_rate": 9.763654466148303e-05,
      "loss": 0.9428,
      "step": 12000
    },
    {
      "epoch": 0.79,
      "learning_rate": 9.704390290157406e-05,
      "loss": 0.9323,
      "step": 12500
    },
    {
      "epoch": 0.83,
      "learning_rate": 9.645244642518491e-05,
      "loss": 0.9265,
      "step": 13000
    },
    {
      "epoch": 0.86,
      "learning_rate": 9.585980466527594e-05,
      "loss": 0.9197,
      "step": 13500
    },
    {
      "epoch": 0.89,
      "learning_rate": 9.526716290536697e-05,
      "loss": 0.9122,
      "step": 14000
    },
    {
      "epoch": 0.92,
      "learning_rate": 9.4674521145458e-05,
      "loss": 0.9065,
      "step": 14500
    },
    {
      "epoch": 0.95,
      "learning_rate": 9.408187938554904e-05,
      "loss": 0.9023,
      "step": 15000
    },
    {
      "epoch": 0.99,
      "learning_rate": 9.348923762564006e-05,
      "loss": 0.8965,
      "step": 15500
    },
    {
      "epoch": 1.02,
      "learning_rate": 9.289659586573108e-05,
      "loss": 0.8901,
      "step": 16000
    },
    {
      "epoch": 1.05,
      "learning_rate": 9.230395410582213e-05,
      "loss": 0.8822,
      "step": 16500
    },
    {
      "epoch": 1.08,
      "learning_rate": 9.171131234591315e-05,
      "loss": 0.8767,
      "step": 17000
    },
    {
      "epoch": 1.11,
      "learning_rate": 9.111867058600417e-05,
      "loss": 0.8711,
      "step": 17500
    },
    {
      "epoch": 1.14,
      "learning_rate": 9.052721410961502e-05,
      "loss": 0.8692,
      "step": 18000
    },
    {
      "epoch": 1.18,
      "learning_rate": 8.993457234970606e-05,
      "loss": 0.8655,
      "step": 18500
    },
    {
      "epoch": 1.21,
      "learning_rate": 8.934193058979709e-05,
      "loss": 0.861,
      "step": 19000
    },
    {
      "epoch": 1.24,
      "learning_rate": 8.874928882988811e-05,
      "loss": 0.8566,
      "step": 19500
    },
    {
      "epoch": 1.27,
      "learning_rate": 8.815664706997914e-05,
      "loss": 0.8509,
      "step": 20000
    },
    {
      "epoch": 1.3,
      "learning_rate": 8.756519059359e-05,
      "loss": 0.8492,
      "step": 20500
    },
    {
      "epoch": 1.34,
      "learning_rate": 8.697373411720084e-05,
      "loss": 0.8475,
      "step": 21000
    },
    {
      "epoch": 1.37,
      "learning_rate": 8.638109235729187e-05,
      "loss": 0.8471,
      "step": 21500
    },
    {
      "epoch": 1.4,
      "learning_rate": 8.578845059738289e-05,
      "loss": 0.8393,
      "step": 22000
    },
    {
      "epoch": 1.43,
      "learning_rate": 8.519580883747393e-05,
      "loss": 0.8341,
      "step": 22500
    },
    {
      "epoch": 1.46,
      "learning_rate": 8.460316707756496e-05,
      "loss": 0.8332,
      "step": 23000
    },
    {
      "epoch": 1.49,
      "learning_rate": 8.401052531765598e-05,
      "loss": 0.8295,
      "step": 23500
    },
    {
      "epoch": 1.53,
      "learning_rate": 8.341788355774702e-05,
      "loss": 0.8295,
      "step": 24000
    },
    {
      "epoch": 1.56,
      "learning_rate": 8.282524179783805e-05,
      "loss": 0.8237,
      "step": 24500
    },
    {
      "epoch": 1.59,
      "learning_rate": 8.22337853214489e-05,
      "loss": 0.8213,
      "step": 25000
    },
    {
      "epoch": 1.62,
      "learning_rate": 8.164232884505975e-05,
      "loss": 0.8187,
      "step": 25500
    },
    {
      "epoch": 1.65,
      "learning_rate": 8.104968708515077e-05,
      "loss": 0.8154,
      "step": 26000
    },
    {
      "epoch": 1.68,
      "learning_rate": 8.04570453252418e-05,
      "loss": 0.8133,
      "step": 26500
    },
    {
      "epoch": 1.72,
      "learning_rate": 7.986440356533284e-05,
      "loss": 0.8064,
      "step": 27000
    },
    {
      "epoch": 1.75,
      "learning_rate": 7.927176180542386e-05,
      "loss": 0.8087,
      "step": 27500
    },
    {
      "epoch": 1.78,
      "learning_rate": 7.86803053290347e-05,
      "loss": 0.809,
      "step": 28000
    },
    {
      "epoch": 1.81,
      "learning_rate": 7.808766356912574e-05,
      "loss": 0.8087,
      "step": 28500
    },
    {
      "epoch": 1.84,
      "learning_rate": 7.749502180921678e-05,
      "loss": 0.8018,
      "step": 29000
    },
    {
      "epoch": 1.88,
      "learning_rate": 7.690356533282762e-05,
      "loss": 0.7998,
      "step": 29500
    },
    {
      "epoch": 1.91,
      "learning_rate": 7.631092357291864e-05,
      "loss": 0.7984,
      "step": 30000
    },
    {
      "epoch": 1.94,
      "learning_rate": 7.571828181300968e-05,
      "loss": 0.7941,
      "step": 30500
    },
    {
      "epoch": 1.97,
      "learning_rate": 7.512564005310071e-05,
      "loss": 0.7965,
      "step": 31000
    },
    {
      "epoch": 2.0,
      "learning_rate": 7.453299829319173e-05,
      "loss": 0.7941,
      "step": 31500
    },
    {
      "epoch": 2.03,
      "learning_rate": 7.394035653328276e-05,
      "loss": 0.7901,
      "step": 32000
    },
    {
      "epoch": 2.07,
      "learning_rate": 7.33477147733738e-05,
      "loss": 0.7855,
      "step": 32500
    },
    {
      "epoch": 2.1,
      "learning_rate": 7.275507301346483e-05,
      "loss": 0.7825,
      "step": 33000
    },
    {
      "epoch": 2.13,
      "learning_rate": 7.216243125355585e-05,
      "loss": 0.7848,
      "step": 33500
    },
    {
      "epoch": 2.16,
      "learning_rate": 7.156978949364688e-05,
      "loss": 0.779,
      "step": 34000
    },
    {
      "epoch": 2.19,
      "learning_rate": 7.097714773373792e-05,
      "loss": 0.7778,
      "step": 34500
    },
    {
      "epoch": 2.23,
      "learning_rate": 7.038450597382894e-05,
      "loss": 0.7789,
      "step": 35000
    },
    {
      "epoch": 2.26,
      "learning_rate": 6.979186421391997e-05,
      "loss": 0.7749,
      "step": 35500
    },
    {
      "epoch": 2.29,
      "learning_rate": 6.920040773753082e-05,
      "loss": 0.7719,
      "step": 36000
    },
    {
      "epoch": 2.32,
      "learning_rate": 6.860776597762185e-05,
      "loss": 0.7755,
      "step": 36500
    },
    {
      "epoch": 2.35,
      "learning_rate": 6.801512421771288e-05,
      "loss": 0.7708,
      "step": 37000
    },
    {
      "epoch": 2.38,
      "learning_rate": 6.742248245780391e-05,
      "loss": 0.7677,
      "step": 37500
    },
    {
      "epoch": 2.42,
      "learning_rate": 6.682984069789495e-05,
      "loss": 0.766,
      "step": 38000
    },
    {
      "epoch": 2.45,
      "learning_rate": 6.623719893798597e-05,
      "loss": 0.767,
      "step": 38500
    },
    {
      "epoch": 2.48,
      "learning_rate": 6.5644557178077e-05,
      "loss": 0.7628,
      "step": 39000
    },
    {
      "epoch": 2.51,
      "learning_rate": 6.505191541816802e-05,
      "loss": 0.7638,
      "step": 39500
    },
    {
      "epoch": 2.54,
      "learning_rate": 6.445927365825906e-05,
      "loss": 0.7631,
      "step": 40000
    },
    {
      "epoch": 2.58,
      "learning_rate": 6.38666318983501e-05,
      "loss": 0.7587,
      "step": 40500
    },
    {
      "epoch": 2.61,
      "learning_rate": 6.327636070548075e-05,
      "loss": 0.7595,
      "step": 41000
    },
    {
      "epoch": 2.64,
      "learning_rate": 6.268371894557177e-05,
      "loss": 0.7599,
      "step": 41500
    },
    {
      "epoch": 2.67,
      "learning_rate": 6.209107718566282e-05,
      "loss": 0.7583,
      "step": 42000
    },
    {
      "epoch": 2.7,
      "learning_rate": 6.149843542575384e-05,
      "loss": 0.7534,
      "step": 42500
    },
    {
      "epoch": 2.73,
      "learning_rate": 6.090579366584487e-05,
      "loss": 0.7564,
      "step": 43000
    },
    {
      "epoch": 2.77,
      "learning_rate": 6.0313151905935906e-05,
      "loss": 0.7527,
      "step": 43500
    },
    {
      "epoch": 2.8,
      "learning_rate": 5.9720510146026934e-05,
      "loss": 0.7526,
      "step": 44000
    },
    {
      "epoch": 2.83,
      "learning_rate": 5.9127868386117956e-05,
      "loss": 0.7519,
      "step": 44500
    },
    {
      "epoch": 2.86,
      "learning_rate": 5.85364119097288e-05,
      "loss": 0.749,
      "step": 45000
    },
    {
      "epoch": 2.89,
      "learning_rate": 5.7943770149819844e-05,
      "loss": 0.7492,
      "step": 45500
    },
    {
      "epoch": 2.92,
      "learning_rate": 5.7351128389910865e-05,
      "loss": 0.7468,
      "step": 46000
    },
    {
      "epoch": 2.96,
      "learning_rate": 5.6758486630001894e-05,
      "loss": 0.745,
      "step": 46500
    },
    {
      "epoch": 2.99,
      "learning_rate": 5.616584487009293e-05,
      "loss": 0.7479,
      "step": 47000
    },
    {
      "epoch": 3.02,
      "learning_rate": 5.557320311018396e-05,
      "loss": 0.7431,
      "step": 47500
    },
    {
      "epoch": 3.05,
      "learning_rate": 5.4980561350274985e-05,
      "loss": 0.7406,
      "step": 48000
    },
    {
      "epoch": 3.08,
      "learning_rate": 5.438791959036602e-05,
      "loss": 0.7401,
      "step": 48500
    },
    {
      "epoch": 3.12,
      "learning_rate": 5.379527783045705e-05,
      "loss": 0.7379,
      "step": 49000
    },
    {
      "epoch": 3.15,
      "learning_rate": 5.3203821354067895e-05,
      "loss": 0.7375,
      "step": 49500
    },
    {
      "epoch": 3.18,
      "learning_rate": 5.261117959415892e-05,
      "loss": 0.7339,
      "step": 50000
    },
    {
      "epoch": 3.21,
      "learning_rate": 5.201853783424996e-05,
      "loss": 0.7349,
      "step": 50500
    },
    {
      "epoch": 3.24,
      "learning_rate": 5.1425896074340986e-05,
      "loss": 0.7348,
      "step": 51000
    },
    {
      "epoch": 3.27,
      "learning_rate": 5.083443959795183e-05,
      "loss": 0.7319,
      "step": 51500
    },
    {
      "epoch": 3.31,
      "learning_rate": 5.024179783804286e-05,
      "loss": 0.7271,
      "step": 52000
    },
    {
      "epoch": 3.34,
      "learning_rate": 4.964915607813389e-05,
      "loss": 0.7309,
      "step": 52500
    },
    {
      "epoch": 3.37,
      "learning_rate": 4.905769960174474e-05,
      "loss": 0.733,
      "step": 53000
    },
    {
      "epoch": 3.4,
      "learning_rate": 4.846505784183577e-05,
      "loss": 0.7277,
      "step": 53500
    },
    {
      "epoch": 3.43,
      "learning_rate": 4.7872416081926805e-05,
      "loss": 0.728,
      "step": 54000
    },
    {
      "epoch": 3.47,
      "learning_rate": 4.7279774322017827e-05,
      "loss": 0.7268,
      "step": 54500
    },
    {
      "epoch": 3.5,
      "learning_rate": 4.668713256210886e-05,
      "loss": 0.7261,
      "step": 55000
    },
    {
      "epoch": 3.53,
      "learning_rate": 4.609567608571971e-05,
      "loss": 0.7251,
      "step": 55500
    },
    {
      "epoch": 3.56,
      "learning_rate": 4.5503034325810736e-05,
      "loss": 0.7261,
      "step": 56000
    },
    {
      "epoch": 3.59,
      "learning_rate": 4.4910392565901764e-05,
      "loss": 0.7231,
      "step": 56500
    },
    {
      "epoch": 3.62,
      "learning_rate": 4.43177508059928e-05,
      "loss": 0.7197,
      "step": 57000
    },
    {
      "epoch": 3.66,
      "learning_rate": 4.3726294329603646e-05,
      "loss": 0.7249,
      "step": 57500
    },
    {
      "epoch": 3.69,
      "learning_rate": 4.313483785321449e-05,
      "loss": 0.7204,
      "step": 58000
    },
    {
      "epoch": 3.72,
      "learning_rate": 4.254219609330552e-05,
      "loss": 0.718,
      "step": 58500
    },
    {
      "epoch": 3.75,
      "learning_rate": 4.194955433339655e-05,
      "loss": 0.7205,
      "step": 59000
    },
    {
      "epoch": 3.78,
      "learning_rate": 4.1356912573487584e-05,
      "loss": 0.7164,
      "step": 59500
    },
    {
      "epoch": 3.81,
      "learning_rate": 4.076427081357861e-05,
      "loss": 0.7194,
      "step": 60000
    },
    {
      "epoch": 3.85,
      "learning_rate": 4.017162905366964e-05,
      "loss": 0.7166,
      "step": 60500
    },
    {
      "epoch": 3.88,
      "learning_rate": 3.9578987293760675e-05,
      "loss": 0.7167,
      "step": 61000
    },
    {
      "epoch": 3.91,
      "learning_rate": 3.8986345533851696e-05,
      "loss": 0.7175,
      "step": 61500
    },
    {
      "epoch": 3.94,
      "learning_rate": 3.839370377394273e-05,
      "loss": 0.7104,
      "step": 62000
    },
    {
      "epoch": 3.97,
      "learning_rate": 3.780106201403376e-05,
      "loss": 0.7116,
      "step": 62500
    },
    {
      "epoch": 4.01,
      "learning_rate": 3.720842025412479e-05,
      "loss": 0.7125,
      "step": 63000
    },
    {
      "epoch": 4.04,
      "learning_rate": 3.6615778494215816e-05,
      "loss": 0.7098,
      "step": 63500
    },
    {
      "epoch": 4.07,
      "learning_rate": 3.602432201782667e-05,
      "loss": 0.708,
      "step": 64000
    },
    {
      "epoch": 4.1,
      "learning_rate": 3.543168025791769e-05,
      "loss": 0.7081,
      "step": 64500
    },
    {
      "epoch": 4.13,
      "learning_rate": 3.4839038498008725e-05,
      "loss": 0.7058,
      "step": 65000
    },
    {
      "epoch": 4.16,
      "learning_rate": 3.4246396738099754e-05,
      "loss": 0.7072,
      "step": 65500
    },
    {
      "epoch": 4.2,
      "learning_rate": 3.365375497819078e-05,
      "loss": 0.7068,
      "step": 66000
    },
    {
      "epoch": 4.23,
      "learning_rate": 3.306111321828182e-05,
      "loss": 0.706,
      "step": 66500
    },
    {
      "epoch": 4.26,
      "learning_rate": 3.246965674189266e-05,
      "loss": 0.705,
      "step": 67000
    },
    {
      "epoch": 4.29,
      "learning_rate": 3.187701498198369e-05,
      "loss": 0.7043,
      "step": 67500
    },
    {
      "epoch": 4.32,
      "learning_rate": 3.128437322207472e-05,
      "loss": 0.7025,
      "step": 68000
    },
    {
      "epoch": 4.36,
      "learning_rate": 3.0691731462165755e-05,
      "loss": 0.7019,
      "step": 68500
    },
    {
      "epoch": 4.39,
      "learning_rate": 3.0099089702256783e-05,
      "loss": 0.703,
      "step": 69000
    },
    {
      "epoch": 4.42,
      "learning_rate": 2.9506447942347808e-05,
      "loss": 0.7023,
      "step": 69500
    },
    {
      "epoch": 4.45,
      "learning_rate": 2.891380618243884e-05,
      "loss": 0.7023,
      "step": 70000
    },
    {
      "epoch": 4.48,
      "learning_rate": 2.832234970604969e-05,
      "loss": 0.7,
      "step": 70500
    },
    {
      "epoch": 4.51,
      "learning_rate": 2.772970794614072e-05,
      "loss": 0.6976,
      "step": 71000
    },
    {
      "epoch": 4.55,
      "learning_rate": 2.7137066186231745e-05,
      "loss": 0.7009,
      "step": 71500
    },
    {
      "epoch": 4.58,
      "learning_rate": 2.6544424426322777e-05,
      "loss": 0.6985,
      "step": 72000
    },
    {
      "epoch": 4.61,
      "learning_rate": 2.595178266641381e-05,
      "loss": 0.6976,
      "step": 72500
    },
    {
      "epoch": 4.64,
      "learning_rate": 2.5359140906504837e-05,
      "loss": 0.6988,
      "step": 73000
    },
    {
      "epoch": 4.67,
      "learning_rate": 2.4766499146595868e-05,
      "loss": 0.6988,
      "step": 73500
    },
    {
      "epoch": 4.7,
      "learning_rate": 2.4173857386686896e-05,
      "loss": 0.6964,
      "step": 74000
    },
    {
      "epoch": 4.74,
      "learning_rate": 2.3582400910297743e-05,
      "loss": 0.6972,
      "step": 74500
    },
    {
      "epoch": 4.77,
      "learning_rate": 2.2989759150388775e-05,
      "loss": 0.6948,
      "step": 75000
    },
    {
      "epoch": 4.8,
      "learning_rate": 2.2397117390479806e-05,
      "loss": 0.6946,
      "step": 75500
    },
    {
      "epoch": 4.83,
      "learning_rate": 2.1804475630570834e-05,
      "loss": 0.6927,
      "step": 76000
    },
    {
      "epoch": 4.86,
      "learning_rate": 2.1211833870661862e-05,
      "loss": 0.6909,
      "step": 76500
    },
    {
      "epoch": 4.9,
      "learning_rate": 2.0620377394272712e-05,
      "loss": 0.6906,
      "step": 77000
    },
    {
      "epoch": 4.93,
      "learning_rate": 2.002892091788356e-05,
      "loss": 0.6929,
      "step": 77500
    },
    {
      "epoch": 4.96,
      "learning_rate": 1.9436279157974587e-05,
      "loss": 0.6917,
      "step": 78000
    },
    {
      "epoch": 4.99,
      "learning_rate": 1.884363739806562e-05,
      "loss": 0.6925,
      "step": 78500
    },
    {
      "epoch": 5.02,
      "learning_rate": 1.8250995638156647e-05,
      "loss": 0.6895,
      "step": 79000
    },
    {
      "epoch": 5.05,
      "learning_rate": 1.765835387824768e-05,
      "loss": 0.686,
      "step": 79500
    },
    {
      "epoch": 5.09,
      "learning_rate": 1.706571211833871e-05,
      "loss": 0.6882,
      "step": 80000
    },
    {
      "epoch": 5.12,
      "learning_rate": 1.6473070358429738e-05,
      "loss": 0.6877,
      "step": 80500
    },
    {
      "epoch": 5.15,
      "learning_rate": 1.5880428598520766e-05,
      "loss": 0.688,
      "step": 81000
    },
    {
      "epoch": 5.18,
      "learning_rate": 1.5287786838611798e-05,
      "loss": 0.6863,
      "step": 81500
    },
    {
      "epoch": 5.21,
      "learning_rate": 1.4696330362222643e-05,
      "loss": 0.6882,
      "step": 82000
    },
    {
      "epoch": 5.25,
      "learning_rate": 1.4104873885833491e-05,
      "loss": 0.6892,
      "step": 82500
    },
    {
      "epoch": 5.28,
      "learning_rate": 1.351223212592452e-05,
      "loss": 0.6878,
      "step": 83000
    },
    {
      "epoch": 5.31,
      "learning_rate": 1.2919590366015552e-05,
      "loss": 0.6841,
      "step": 83500
    },
    {
      "epoch": 5.34,
      "learning_rate": 1.232694860610658e-05,
      "loss": 0.6841,
      "step": 84000
    },
    {
      "epoch": 5.37,
      "learning_rate": 1.173430684619761e-05,
      "loss": 0.6855,
      "step": 84500
    },
    {
      "epoch": 5.4,
      "learning_rate": 1.1141665086288642e-05,
      "loss": 0.6833,
      "step": 85000
    },
    {
      "epoch": 5.44,
      "learning_rate": 1.054902332637967e-05,
      "loss": 0.683,
      "step": 85500
    },
    {
      "epoch": 5.47,
      "learning_rate": 9.9563815664707e-06,
      "loss": 0.6847,
      "step": 86000
    },
    {
      "epoch": 5.5,
      "learning_rate": 9.36373980656173e-06,
      "loss": 0.6845,
      "step": 86500
    },
    {
      "epoch": 5.53,
      "learning_rate": 8.77109804665276e-06,
      "loss": 0.6795,
      "step": 87000
    },
    {
      "epoch": 5.56,
      "learning_rate": 8.17845628674379e-06,
      "loss": 0.6817,
      "step": 87500
    },
    {
      "epoch": 5.6,
      "learning_rate": 7.585814526834819e-06,
      "loss": 0.6796,
      "step": 88000
    },
    {
      "epoch": 5.63,
      "learning_rate": 6.994358050445667e-06,
      "loss": 0.684,
      "step": 88500
    },
    {
      "epoch": 5.66,
      "learning_rate": 6.401716290536697e-06,
      "loss": 0.6833,
      "step": 89000
    },
    {
      "epoch": 5.69,
      "learning_rate": 5.810259814147544e-06,
      "loss": 0.6805,
      "step": 89500
    },
    {
      "epoch": 5.72,
      "learning_rate": 5.217618054238574e-06,
      "loss": 0.6823,
      "step": 90000
    },
    {
      "epoch": 5.75,
      "learning_rate": 4.626161577849421e-06,
      "loss": 0.6786,
      "step": 90500
    },
    {
      "epoch": 5.79,
      "learning_rate": 4.033519817940452e-06,
      "loss": 0.676,
      "step": 91000
    },
    {
      "epoch": 5.82,
      "learning_rate": 3.440878058031481e-06,
      "loss": 0.6811,
      "step": 91500
    },
    {
      "epoch": 5.85,
      "learning_rate": 2.848236298122511e-06,
      "loss": 0.6805,
      "step": 92000
    },
    {
      "epoch": 5.88,
      "learning_rate": 2.2567798217333587e-06,
      "loss": 0.6805,
      "step": 92500
    },
    {
      "epoch": 5.91,
      "learning_rate": 1.6641380618243886e-06,
      "loss": 0.6783,
      "step": 93000
    },
    {
      "epoch": 5.94,
      "learning_rate": 1.0714963019154182e-06,
      "loss": 0.6806,
      "step": 93500
    },
    {
      "epoch": 5.98,
      "learning_rate": 4.78854542006448e-07,
      "loss": 0.6792,
      "step": 94000
    },
    {
      "before_init_mem_cpu": 1314717696,
      "before_init_mem_gpu": 0,
      "epoch": 6.0,
      "init_mem_cpu_alloc_delta": 880553984,
      "init_mem_cpu_peaked_delta": 93917184,
      "init_mem_gpu_alloc_delta": 443396608,
      "init_mem_gpu_peaked_delta": 0,
      "step": 94368,
      "total_flos": 1.9076000524147884e+19,
      "train_loss": 0.8106447110522921,
      "train_mem_cpu_alloc_delta": 31978672128,
      "train_mem_cpu_peaked_delta": 500772864,
      "train_mem_gpu_alloc_delta": 1348273664,
      "train_mem_gpu_peaked_delta": 64082841600,
      "train_runtime": 71211.051,
      "train_samples_per_second": 1017.691,
      "train_steps_per_second": 1.325
    }
  ],
  "logging_steps": 500,
  "max_steps": 94368,
  "num_train_epochs": 6,
  "save_steps": 500,
  "total_flos": 1.9076000524147884e+19,
  "trial_name": null,
  "trial_params": null
}