| { |
| "best_global_step": null, |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 2.0, |
| "eval_steps": 500, |
| "global_step": 808, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.024752475247524754, |
| "grad_norm": 3.7717831664734938, |
| "learning_rate": 1.111111111111111e-06, |
| "loss": 0.5538, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.04950495049504951, |
| "grad_norm": 1.5623613876454339, |
| "learning_rate": 2.345679012345679e-06, |
| "loss": 0.4992, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.07425742574257425, |
| "grad_norm": 1.2913902697179327, |
| "learning_rate": 3.580246913580247e-06, |
| "loss": 0.3647, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.09900990099009901, |
| "grad_norm": 0.5890685057936159, |
| "learning_rate": 4.814814814814815e-06, |
| "loss": 0.2965, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.12376237623762376, |
| "grad_norm": 1.1014770026412741, |
| "learning_rate": 6.049382716049383e-06, |
| "loss": 0.2868, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.1485148514851485, |
| "grad_norm": 0.8427291729270426, |
| "learning_rate": 7.283950617283952e-06, |
| "loss": 0.2578, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.17326732673267325, |
| "grad_norm": 0.7111238557372528, |
| "learning_rate": 8.518518518518519e-06, |
| "loss": 0.2375, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.19801980198019803, |
| "grad_norm": 0.6520781231804735, |
| "learning_rate": 9.753086419753087e-06, |
| "loss": 0.2463, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.22277227722772278, |
| "grad_norm": 0.5328184668534979, |
| "learning_rate": 9.997012501794273e-06, |
| "loss": 0.2387, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.24752475247524752, |
| "grad_norm": 0.9283511444433159, |
| "learning_rate": 9.984881908680157e-06, |
| "loss": 0.2347, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.2722772277227723, |
| "grad_norm": 0.8219030941347751, |
| "learning_rate": 9.963444133394478e-06, |
| "loss": 0.2298, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.297029702970297, |
| "grad_norm": 0.6999809415899867, |
| "learning_rate": 9.93273920201681e-06, |
| "loss": 0.2211, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.3217821782178218, |
| "grad_norm": 0.6654026186983417, |
| "learning_rate": 9.892824443164987e-06, |
| "loss": 0.2176, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.3465346534653465, |
| "grad_norm": 0.5360045754561646, |
| "learning_rate": 9.84377438095789e-06, |
| "loss": 0.2302, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.3712871287128713, |
| "grad_norm": 0.8762427551610494, |
| "learning_rate": 9.785680595872824e-06, |
| "loss": 0.2248, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.39603960396039606, |
| "grad_norm": 0.8034465851573688, |
| "learning_rate": 9.718651553757266e-06, |
| "loss": 0.2126, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.4207920792079208, |
| "grad_norm": 0.6661288006527506, |
| "learning_rate": 9.642812403314272e-06, |
| "loss": 0.2058, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.44554455445544555, |
| "grad_norm": 0.6773722240919612, |
| "learning_rate": 9.55830474243961e-06, |
| "loss": 0.2191, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.47029702970297027, |
| "grad_norm": 0.5879164256772569, |
| "learning_rate": 9.465286353846905e-06, |
| "loss": 0.2134, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.49504950495049505, |
| "grad_norm": 0.8601137334755529, |
| "learning_rate": 9.36393091047441e-06, |
| "loss": 0.2135, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.5198019801980198, |
| "grad_norm": 0.6739749629917621, |
| "learning_rate": 9.254427651223434e-06, |
| "loss": 0.1966, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.5445544554455446, |
| "grad_norm": 0.6576980236826349, |
| "learning_rate": 9.136981027633834e-06, |
| "loss": 0.2033, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.5693069306930693, |
| "grad_norm": 0.706488237713152, |
| "learning_rate": 9.011810322156269e-06, |
| "loss": 0.2065, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.594059405940594, |
| "grad_norm": 0.510070145931782, |
| "learning_rate": 8.879149238733932e-06, |
| "loss": 0.2075, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.6188118811881188, |
| "grad_norm": 0.8056260628581989, |
| "learning_rate": 8.739245466458187e-06, |
| "loss": 0.1992, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.6435643564356436, |
| "grad_norm": 0.7108545145944659, |
| "learning_rate": 8.592360217112759e-06, |
| "loss": 0.205, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.6683168316831684, |
| "grad_norm": 0.6111437108958193, |
| "learning_rate": 8.438767737469995e-06, |
| "loss": 0.2052, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.693069306930693, |
| "grad_norm": 0.5992778239350629, |
| "learning_rate": 8.278754797249702e-06, |
| "loss": 0.1967, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.7178217821782178, |
| "grad_norm": 0.537914581152557, |
| "learning_rate": 8.11262015369663e-06, |
| "loss": 0.207, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.7425742574257426, |
| "grad_norm": 0.8807609285027707, |
| "learning_rate": 7.940673993776258e-06, |
| "loss": 0.2137, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.7673267326732673, |
| "grad_norm": 0.650059405365673, |
| "learning_rate": 7.763237355030384e-06, |
| "loss": 0.1939, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.7920792079207921, |
| "grad_norm": 0.6184279949354257, |
| "learning_rate": 7.580641526173758e-06, |
| "loss": 0.2038, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.8168316831683168, |
| "grad_norm": 0.5587655976098158, |
| "learning_rate": 7.39322742855097e-06, |
| "loss": 0.2022, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.8415841584158416, |
| "grad_norm": 0.5295208847287186, |
| "learning_rate": 7.201344979608423e-06, |
| "loss": 0.1985, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.8663366336633663, |
| "grad_norm": 0.8125315700245753, |
| "learning_rate": 7.0053524395698345e-06, |
| "loss": 0.2106, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.8910891089108911, |
| "grad_norm": 0.6775574483997845, |
| "learning_rate": 6.805615742535117e-06, |
| "loss": 0.1922, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.9158415841584159, |
| "grad_norm": 0.6084684704334197, |
| "learning_rate": 6.602507813251478e-06, |
| "loss": 0.1971, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.9405940594059405, |
| "grad_norm": 0.5331031723925573, |
| "learning_rate": 6.396407870832419e-06, |
| "loss": 0.1945, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.9653465346534653, |
| "grad_norm": 0.5520343443596168, |
| "learning_rate": 6.187700720724648e-06, |
| "loss": 0.1879, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.9900990099009901, |
| "grad_norm": 1.0525009438684532, |
| "learning_rate": 5.976776036244833e-06, |
| "loss": 0.201, |
| "step": 400 |
| }, |
| { |
| "epoch": 1.0148514851485149, |
| "grad_norm": 0.5379692003711549, |
| "learning_rate": 5.764027631027659e-06, |
| "loss": 0.1561, |
| "step": 410 |
| }, |
| { |
| "epoch": 1.0396039603960396, |
| "grad_norm": 0.5984174599901763, |
| "learning_rate": 5.549852723743564e-06, |
| "loss": 0.1251, |
| "step": 420 |
| }, |
| { |
| "epoch": 1.0643564356435644, |
| "grad_norm": 0.4220751315218362, |
| "learning_rate": 5.334651196459003e-06, |
| "loss": 0.1272, |
| "step": 430 |
| }, |
| { |
| "epoch": 1.0891089108910892, |
| "grad_norm": 0.680523717996421, |
| "learning_rate": 5.118824848023926e-06, |
| "loss": 0.1243, |
| "step": 440 |
| }, |
| { |
| "epoch": 1.113861386138614, |
| "grad_norm": 0.7596976605088543, |
| "learning_rate": 4.902776643880461e-06, |
| "loss": 0.1215, |
| "step": 450 |
| }, |
| { |
| "epoch": 1.1386138613861387, |
| "grad_norm": 0.5594424740783379, |
| "learning_rate": 4.686909963693498e-06, |
| "loss": 0.1242, |
| "step": 460 |
| }, |
| { |
| "epoch": 1.1633663366336633, |
| "grad_norm": 0.620634424573326, |
| "learning_rate": 4.47162784820784e-06, |
| "loss": 0.1181, |
| "step": 470 |
| }, |
| { |
| "epoch": 1.188118811881188, |
| "grad_norm": 0.4254231101088345, |
| "learning_rate": 4.257332246738201e-06, |
| "loss": 0.1162, |
| "step": 480 |
| }, |
| { |
| "epoch": 1.2128712871287128, |
| "grad_norm": 0.7483475123867419, |
| "learning_rate": 4.04442326669695e-06, |
| "loss": 0.1206, |
| "step": 490 |
| }, |
| { |
| "epoch": 1.2376237623762376, |
| "grad_norm": 0.7097619263952759, |
| "learning_rate": 3.833298426560851e-06, |
| "loss": 0.1271, |
| "step": 500 |
| }, |
| { |
| "epoch": 1.2623762376237624, |
| "grad_norm": 0.5726850191754431, |
| "learning_rate": 3.624351913671571e-06, |
| "loss": 0.1172, |
| "step": 510 |
| }, |
| { |
| "epoch": 1.2871287128712872, |
| "grad_norm": 0.5439241474776657, |
| "learning_rate": 3.4179738482556648e-06, |
| "loss": 0.1266, |
| "step": 520 |
| }, |
| { |
| "epoch": 1.311881188118812, |
| "grad_norm": 0.5939398974747236, |
| "learning_rate": 3.214549555038218e-06, |
| "loss": 0.1244, |
| "step": 530 |
| }, |
| { |
| "epoch": 1.3366336633663367, |
| "grad_norm": 0.7350889337676968, |
| "learning_rate": 3.0144588438100693e-06, |
| "loss": 0.1215, |
| "step": 540 |
| }, |
| { |
| "epoch": 1.3613861386138613, |
| "grad_norm": 0.9500186584429013, |
| "learning_rate": 2.8180753002918735e-06, |
| "loss": 0.1238, |
| "step": 550 |
| }, |
| { |
| "epoch": 1.386138613861386, |
| "grad_norm": 0.5015920742742683, |
| "learning_rate": 2.6257655886190147e-06, |
| "loss": 0.1179, |
| "step": 560 |
| }, |
| { |
| "epoch": 1.4108910891089108, |
| "grad_norm": 0.5256757837291076, |
| "learning_rate": 2.4378887667496696e-06, |
| "loss": 0.119, |
| "step": 570 |
| }, |
| { |
| "epoch": 1.4356435643564356, |
| "grad_norm": 0.48451626235324025, |
| "learning_rate": 2.2547956160742473e-06, |
| "loss": 0.1224, |
| "step": 580 |
| }, |
| { |
| "epoch": 1.4603960396039604, |
| "grad_norm": 0.8021372949842518, |
| "learning_rate": 2.0768279864778475e-06, |
| "loss": 0.1156, |
| "step": 590 |
| }, |
| { |
| "epoch": 1.4851485148514851, |
| "grad_norm": 0.6463561099184126, |
| "learning_rate": 1.9043181580785597e-06, |
| "loss": 0.1204, |
| "step": 600 |
| }, |
| { |
| "epoch": 1.50990099009901, |
| "grad_norm": 0.5745060453080356, |
| "learning_rate": 1.73758822083332e-06, |
| "loss": 0.1194, |
| "step": 610 |
| }, |
| { |
| "epoch": 1.5346534653465347, |
| "grad_norm": 0.5752290379243282, |
| "learning_rate": 1.5769494731696206e-06, |
| "loss": 0.1137, |
| "step": 620 |
| }, |
| { |
| "epoch": 1.5594059405940595, |
| "grad_norm": 0.4211483621733392, |
| "learning_rate": 1.4227018407658822e-06, |
| "loss": 0.1153, |
| "step": 630 |
| }, |
| { |
| "epoch": 1.5841584158415842, |
| "grad_norm": 0.7145074104306698, |
| "learning_rate": 1.275133316565691e-06, |
| "loss": 0.1165, |
| "step": 640 |
| }, |
| { |
| "epoch": 1.608910891089109, |
| "grad_norm": 0.5745146397989843, |
| "learning_rate": 1.1345194230714235e-06, |
| "loss": 0.1177, |
| "step": 650 |
| }, |
| { |
| "epoch": 1.6336633663366338, |
| "grad_norm": 0.6092745392474433, |
| "learning_rate": 1.001122697921197e-06, |
| "loss": 0.1118, |
| "step": 660 |
| }, |
| { |
| "epoch": 1.6584158415841586, |
| "grad_norm": 0.49627988399276496, |
| "learning_rate": 8.751922037096328e-07, |
| "loss": 0.1151, |
| "step": 670 |
| }, |
| { |
| "epoch": 1.6831683168316833, |
| "grad_norm": 0.44055150600392545, |
| "learning_rate": 7.569630629676294e-07, |
| "loss": 0.1103, |
| "step": 680 |
| }, |
| { |
| "epoch": 1.7079207920792079, |
| "grad_norm": 0.7390526986739583, |
| "learning_rate": 6.466560191693566e-07, |
| "loss": 0.1175, |
| "step": 690 |
| }, |
| { |
| "epoch": 1.7326732673267327, |
| "grad_norm": 0.6970208272672705, |
| "learning_rate": 5.444770245861553e-07, |
| "loss": 0.1197, |
| "step": 700 |
| }, |
| { |
| "epoch": 1.7574257425742574, |
| "grad_norm": 0.6097731290069269, |
| "learning_rate": 4.506168557567886e-07, |
| "loss": 0.1134, |
| "step": 710 |
| }, |
| { |
| "epoch": 1.7821782178217822, |
| "grad_norm": 0.5244984552185384, |
| "learning_rate": 3.6525075729205274e-07, |
| "loss": 0.1056, |
| "step": 720 |
| }, |
| { |
| "epoch": 1.806930693069307, |
| "grad_norm": 0.39633154642595236, |
| "learning_rate": 2.8853811467875413e-07, |
| "loss": 0.1108, |
| "step": 730 |
| }, |
| { |
| "epoch": 1.8316831683168315, |
| "grad_norm": 0.668178976075422, |
| "learning_rate": 2.2062215669397201e-07, |
| "loss": 0.1131, |
| "step": 740 |
| }, |
| { |
| "epoch": 1.8564356435643563, |
| "grad_norm": 0.6504982001089206, |
| "learning_rate": 1.616296879852175e-07, |
| "loss": 0.1201, |
| "step": 750 |
| }, |
| { |
| "epoch": 1.881188118811881, |
| "grad_norm": 0.5578296874781064, |
| "learning_rate": 1.1167085231579111e-07, |
| "loss": 0.1064, |
| "step": 760 |
| }, |
| { |
| "epoch": 1.9059405940594059, |
| "grad_norm": 0.5447338993738188, |
| "learning_rate": 7.083892691736428e-08, |
| "loss": 0.1125, |
| "step": 770 |
| }, |
| { |
| "epoch": 1.9306930693069306, |
| "grad_norm": 0.4709183650959437, |
| "learning_rate": 3.9210148333763135e-08, |
| "loss": 0.1128, |
| "step": 780 |
| }, |
| { |
| "epoch": 1.9554455445544554, |
| "grad_norm": 0.7903334925188071, |
| "learning_rate": 1.684357008110593e-08, |
| "loss": 0.1098, |
| "step": 790 |
| }, |
| { |
| "epoch": 1.9801980198019802, |
| "grad_norm": 0.6285027843202846, |
| "learning_rate": 3.780952390058379e-09, |
| "loss": 0.11, |
| "step": 800 |
| }, |
| { |
| "epoch": 2.0, |
| "step": 808, |
| "total_flos": 1.478858156980306e+17, |
| "train_loss": 0.17634621978101164, |
| "train_runtime": 12683.1297, |
| "train_samples_per_second": 0.509, |
| "train_steps_per_second": 0.064 |
| } |
| ], |
| "logging_steps": 10, |
| "max_steps": 808, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 2, |
| "save_steps": 10000, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 1.478858156980306e+17, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |