{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.9746835443037973, |
|
"eval_steps": 40, |
|
"global_step": 316, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.006329113924050633, |
|
"grad_norm": 0.40900278091430664, |
|
"learning_rate": 1.0000000000000002e-06, |
|
"loss": 1.3916, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.006329113924050633, |
|
"eval_loss": 1.3258041143417358, |
|
"eval_runtime": 26.8301, |
|
"eval_samples_per_second": 2.87, |
|
"eval_steps_per_second": 0.373, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.012658227848101266, |
|
"grad_norm": 0.3763831555843353, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 1.3133, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0189873417721519, |
|
"grad_norm": 0.3689829111099243, |
|
"learning_rate": 3e-06, |
|
"loss": 1.311, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.02531645569620253, |
|
"grad_norm": 0.3426308035850525, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 1.354, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.03164556962025317, |
|
"grad_norm": 0.3220744729042053, |
|
"learning_rate": 5e-06, |
|
"loss": 1.2563, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0379746835443038, |
|
"grad_norm": 0.3353489935398102, |
|
"learning_rate": 6e-06, |
|
"loss": 1.2544, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.04430379746835443, |
|
"grad_norm": 0.38241901993751526, |
|
"learning_rate": 7e-06, |
|
"loss": 1.3028, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.05063291139240506, |
|
"grad_norm": 0.34248751401901245, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 1.2118, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.056962025316455694, |
|
"grad_norm": 0.39697983860969543, |
|
"learning_rate": 9e-06, |
|
"loss": 1.3622, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.06329113924050633, |
|
"grad_norm": 0.34868791699409485, |
|
"learning_rate": 1e-05, |
|
"loss": 1.3546, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.06962025316455696, |
|
"grad_norm": 0.3968316316604614, |
|
"learning_rate": 9.999736492435867e-06, |
|
"loss": 1.2492, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.0759493670886076, |
|
"grad_norm": 0.3699168860912323, |
|
"learning_rate": 9.998945997517957e-06, |
|
"loss": 1.3858, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.08227848101265822, |
|
"grad_norm": 0.38284507393836975, |
|
"learning_rate": 9.99762859856683e-06, |
|
"loss": 1.3124, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.08860759493670886, |
|
"grad_norm": 0.38110166788101196, |
|
"learning_rate": 9.99578443444032e-06, |
|
"loss": 1.3412, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.0949367088607595, |
|
"grad_norm": 0.3838903307914734, |
|
"learning_rate": 9.993413699518906e-06, |
|
"loss": 1.359, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.10126582278481013, |
|
"grad_norm": 0.37867996096611023, |
|
"learning_rate": 9.990516643685222e-06, |
|
"loss": 1.2801, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.10759493670886076, |
|
"grad_norm": 0.4113750457763672, |
|
"learning_rate": 9.987093572297716e-06, |
|
"loss": 1.2252, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.11392405063291139, |
|
"grad_norm": 0.4277723729610443, |
|
"learning_rate": 9.983144846158472e-06, |
|
"loss": 1.3162, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.12025316455696203, |
|
"grad_norm": 0.42338159680366516, |
|
"learning_rate": 9.978670881475173e-06, |
|
"loss": 1.3497, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.12658227848101267, |
|
"grad_norm": 0.39867302775382996, |
|
"learning_rate": 9.973672149817232e-06, |
|
"loss": 1.2736, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.13291139240506328, |
|
"grad_norm": 0.4052245616912842, |
|
"learning_rate": 9.96814917806609e-06, |
|
"loss": 1.3099, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.13924050632911392, |
|
"grad_norm": 0.42667412757873535, |
|
"learning_rate": 9.96210254835968e-06, |
|
"loss": 1.2823, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.14556962025316456, |
|
"grad_norm": 0.4743477404117584, |
|
"learning_rate": 9.955532898031069e-06, |
|
"loss": 1.2729, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.1518987341772152, |
|
"grad_norm": 0.44868361949920654, |
|
"learning_rate": 9.948440919541277e-06, |
|
"loss": 1.2532, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.15822784810126583, |
|
"grad_norm": 0.4610985815525055, |
|
"learning_rate": 9.940827360406297e-06, |
|
"loss": 1.2509, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.16455696202531644, |
|
"grad_norm": 0.42452627420425415, |
|
"learning_rate": 9.932693023118299e-06, |
|
"loss": 1.2821, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.17088607594936708, |
|
"grad_norm": 0.454681932926178, |
|
"learning_rate": 9.924038765061042e-06, |
|
"loss": 1.312, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.17721518987341772, |
|
"grad_norm": 0.4850795865058899, |
|
"learning_rate": 9.91486549841951e-06, |
|
"loss": 1.2769, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.18354430379746836, |
|
"grad_norm": 0.4521865248680115, |
|
"learning_rate": 9.905174190083763e-06, |
|
"loss": 1.2106, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.189873417721519, |
|
"grad_norm": 0.5045363903045654, |
|
"learning_rate": 9.894965861547023e-06, |
|
"loss": 1.3017, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.1962025316455696, |
|
"grad_norm": 0.49623697996139526, |
|
"learning_rate": 9.884241588798004e-06, |
|
"loss": 1.2713, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.20253164556962025, |
|
"grad_norm": 0.5158098936080933, |
|
"learning_rate": 9.873002502207502e-06, |
|
"loss": 1.1848, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.2088607594936709, |
|
"grad_norm": 0.4835788309574127, |
|
"learning_rate": 9.861249786409248e-06, |
|
"loss": 1.1757, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.21518987341772153, |
|
"grad_norm": 0.47741463780403137, |
|
"learning_rate": 9.848984680175049e-06, |
|
"loss": 1.2043, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.22151898734177214, |
|
"grad_norm": 0.48278170824050903, |
|
"learning_rate": 9.836208476284208e-06, |
|
"loss": 1.2695, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.22784810126582278, |
|
"grad_norm": 0.48047342896461487, |
|
"learning_rate": 9.822922521387277e-06, |
|
"loss": 1.2106, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.23417721518987342, |
|
"grad_norm": 0.4800684154033661, |
|
"learning_rate": 9.809128215864096e-06, |
|
"loss": 1.1337, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.24050632911392406, |
|
"grad_norm": 0.47258004546165466, |
|
"learning_rate": 9.794827013676206e-06, |
|
"loss": 1.1514, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.2468354430379747, |
|
"grad_norm": 0.4834759533405304, |
|
"learning_rate": 9.78002042221359e-06, |
|
"loss": 1.1554, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.25316455696202533, |
|
"grad_norm": 0.4504500925540924, |
|
"learning_rate": 9.764710002135784e-06, |
|
"loss": 1.1773, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.25316455696202533, |
|
"eval_loss": 1.1926251649856567, |
|
"eval_runtime": 26.748, |
|
"eval_samples_per_second": 2.879, |
|
"eval_steps_per_second": 0.374, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.25949367088607594, |
|
"grad_norm": 0.4607960283756256, |
|
"learning_rate": 9.748897367207391e-06, |
|
"loss": 1.1435, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.26582278481012656, |
|
"grad_norm": 0.492300808429718, |
|
"learning_rate": 9.732584184127973e-06, |
|
"loss": 1.1635, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.2721518987341772, |
|
"grad_norm": 0.613067090511322, |
|
"learning_rate": 9.715772172356388e-06, |
|
"loss": 1.1724, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.27848101265822783, |
|
"grad_norm": 0.5141116380691528, |
|
"learning_rate": 9.698463103929542e-06, |
|
"loss": 1.2126, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.2848101265822785, |
|
"grad_norm": 0.44970235228538513, |
|
"learning_rate": 9.68065880327562e-06, |
|
"loss": 1.1034, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.2911392405063291, |
|
"grad_norm": 0.4816325902938843, |
|
"learning_rate": 9.66236114702178e-06, |
|
"loss": 1.1584, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.2974683544303797, |
|
"grad_norm": 0.4725569486618042, |
|
"learning_rate": 9.643572063796352e-06, |
|
"loss": 1.1046, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.3037974683544304, |
|
"grad_norm": 0.4943830966949463, |
|
"learning_rate": 9.62429353402556e-06, |
|
"loss": 1.1745, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.310126582278481, |
|
"grad_norm": 0.4736022651195526, |
|
"learning_rate": 9.60452758972477e-06, |
|
"loss": 1.1206, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.31645569620253167, |
|
"grad_norm": 0.6085686087608337, |
|
"learning_rate": 9.584276314284316e-06, |
|
"loss": 1.1544, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.3227848101265823, |
|
"grad_norm": 0.5274167060852051, |
|
"learning_rate": 9.563541842249903e-06, |
|
"loss": 1.1517, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.3291139240506329, |
|
"grad_norm": 0.5260668396949768, |
|
"learning_rate": 9.542326359097619e-06, |
|
"loss": 1.1754, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.33544303797468356, |
|
"grad_norm": 0.540859043598175, |
|
"learning_rate": 9.520632101003579e-06, |
|
"loss": 1.117, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.34177215189873417, |
|
"grad_norm": 0.5016303062438965, |
|
"learning_rate": 9.498461354608228e-06, |
|
"loss": 1.0925, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.34810126582278483, |
|
"grad_norm": 0.5748270750045776, |
|
"learning_rate": 9.475816456775313e-06, |
|
"loss": 1.0432, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.35443037974683544, |
|
"grad_norm": 0.5043241381645203, |
|
"learning_rate": 9.452699794345583e-06, |
|
"loss": 1.0721, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.36075949367088606, |
|
"grad_norm": 0.5576872229576111, |
|
"learning_rate": 9.429113803885199e-06, |
|
"loss": 1.1109, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.3670886075949367, |
|
"grad_norm": 0.5111832022666931, |
|
"learning_rate": 9.405060971428924e-06, |
|
"loss": 1.106, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.37341772151898733, |
|
"grad_norm": 0.551474928855896, |
|
"learning_rate": 9.380543832218069e-06, |
|
"loss": 1.0588, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.379746835443038, |
|
"grad_norm": 0.5133005976676941, |
|
"learning_rate": 9.355564970433288e-06, |
|
"loss": 1.0523, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.3860759493670886, |
|
"grad_norm": 0.5074732303619385, |
|
"learning_rate": 9.330127018922195e-06, |
|
"loss": 1.0051, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.3924050632911392, |
|
"grad_norm": 0.5279291868209839, |
|
"learning_rate": 9.30423265892184e-06, |
|
"loss": 1.0805, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.3987341772151899, |
|
"grad_norm": 0.5405828356742859, |
|
"learning_rate": 9.277884619776116e-06, |
|
"loss": 1.093, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.4050632911392405, |
|
"grad_norm": 0.5386417508125305, |
|
"learning_rate": 9.251085678648072e-06, |
|
"loss": 1.0616, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.41139240506329117, |
|
"grad_norm": 0.563132643699646, |
|
"learning_rate": 9.223838660227183e-06, |
|
"loss": 1.0719, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.4177215189873418, |
|
"grad_norm": 0.510470986366272, |
|
"learning_rate": 9.196146436431635e-06, |
|
"loss": 1.0247, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.4240506329113924, |
|
"grad_norm": 0.5115028023719788, |
|
"learning_rate": 9.168011926105598e-06, |
|
"loss": 1.0982, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.43037974683544306, |
|
"grad_norm": 0.6258170008659363, |
|
"learning_rate": 9.13943809471159e-06, |
|
"loss": 1.0202, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.43670886075949367, |
|
"grad_norm": 0.5000303983688354, |
|
"learning_rate": 9.110427954017891e-06, |
|
"loss": 1.0214, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.4430379746835443, |
|
"grad_norm": 0.5931710600852966, |
|
"learning_rate": 9.08098456178111e-06, |
|
"loss": 1.0137, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.44936708860759494, |
|
"grad_norm": 0.5410647392272949, |
|
"learning_rate": 9.051111021423868e-06, |
|
"loss": 0.9727, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.45569620253164556, |
|
"grad_norm": 0.5379135608673096, |
|
"learning_rate": 9.020810481707709e-06, |
|
"loss": 1.0397, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.4620253164556962, |
|
"grad_norm": 0.6328117251396179, |
|
"learning_rate": 8.990086136401199e-06, |
|
"loss": 1.1266, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.46835443037974683, |
|
"grad_norm": 0.5720458626747131, |
|
"learning_rate": 8.958941223943292e-06, |
|
"loss": 0.9825, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.47468354430379744, |
|
"grad_norm": 0.6638948917388916, |
|
"learning_rate": 8.927379027101994e-06, |
|
"loss": 1.102, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.4810126582278481, |
|
"grad_norm": 0.5943340063095093, |
|
"learning_rate": 8.895402872628352e-06, |
|
"loss": 1.016, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.4873417721518987, |
|
"grad_norm": 0.5992004871368408, |
|
"learning_rate": 8.863016130905795e-06, |
|
"loss": 1.0702, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.4936708860759494, |
|
"grad_norm": 0.5849875807762146, |
|
"learning_rate": 8.83022221559489e-06, |
|
"loss": 1.0524, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.6057909727096558, |
|
"learning_rate": 8.797024583273536e-06, |
|
"loss": 1.0336, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.5063291139240507, |
|
"grad_norm": 0.5501203536987305, |
|
"learning_rate": 8.763426733072624e-06, |
|
"loss": 1.0169, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.5063291139240507, |
|
"eval_loss": 1.054015874862671, |
|
"eval_runtime": 28.3542, |
|
"eval_samples_per_second": 2.716, |
|
"eval_steps_per_second": 0.353, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.5126582278481012, |
|
"grad_norm": 0.5859403610229492, |
|
"learning_rate": 8.729432206307218e-06, |
|
"loss": 1.055, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.5189873417721519, |
|
"grad_norm": 0.5742907524108887, |
|
"learning_rate": 8.695044586103297e-06, |
|
"loss": 1.0461, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.5253164556962026, |
|
"grad_norm": 0.6878105401992798, |
|
"learning_rate": 8.660267497020074e-06, |
|
"loss": 1.0446, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.5316455696202531, |
|
"grad_norm": 0.5701872706413269, |
|
"learning_rate": 8.625104604667965e-06, |
|
"loss": 1.0535, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.5379746835443038, |
|
"grad_norm": 0.7363747358322144, |
|
"learning_rate": 8.58955961532221e-06, |
|
"loss": 1.0745, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.5443037974683544, |
|
"grad_norm": 0.5861352682113647, |
|
"learning_rate": 8.553636275532236e-06, |
|
"loss": 1.0383, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.5506329113924051, |
|
"grad_norm": 0.5236722230911255, |
|
"learning_rate": 8.51733837172675e-06, |
|
"loss": 1.0031, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.5569620253164557, |
|
"grad_norm": 0.5922764539718628, |
|
"learning_rate": 8.480669729814635e-06, |
|
"loss": 1.0056, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.5632911392405063, |
|
"grad_norm": 0.5885330438613892, |
|
"learning_rate": 8.443634214781693e-06, |
|
"loss": 1.043, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.569620253164557, |
|
"grad_norm": 0.5888968706130981, |
|
"learning_rate": 8.40623573028327e-06, |
|
"loss": 1.021, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.5759493670886076, |
|
"grad_norm": 0.5723572969436646, |
|
"learning_rate": 8.368478218232787e-06, |
|
"loss": 0.9946, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.5822784810126582, |
|
"grad_norm": 0.5512247085571289, |
|
"learning_rate": 8.330365658386252e-06, |
|
"loss": 1.027, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.5886075949367089, |
|
"grad_norm": 0.5887519121170044, |
|
"learning_rate": 8.291902067922791e-06, |
|
"loss": 1.0597, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.5949367088607594, |
|
"grad_norm": 0.6329179406166077, |
|
"learning_rate": 8.25309150102121e-06, |
|
"loss": 0.9517, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.6012658227848101, |
|
"grad_norm": 0.5992307662963867, |
|
"learning_rate": 8.213938048432697e-06, |
|
"loss": 0.9905, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.6075949367088608, |
|
"grad_norm": 0.57530677318573, |
|
"learning_rate": 8.174445837049614e-06, |
|
"loss": 0.973, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.6139240506329114, |
|
"grad_norm": 0.6094960570335388, |
|
"learning_rate": 8.134619029470535e-06, |
|
"loss": 0.952, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.620253164556962, |
|
"grad_norm": 0.5998733639717102, |
|
"learning_rate": 8.094461823561473e-06, |
|
"loss": 0.8997, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.6265822784810127, |
|
"grad_norm": 0.6438018679618835, |
|
"learning_rate": 8.05397845201344e-06, |
|
"loss": 0.9927, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.6329113924050633, |
|
"grad_norm": 0.5597391128540039, |
|
"learning_rate": 8.013173181896283e-06, |
|
"loss": 0.9735, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.6392405063291139, |
|
"grad_norm": 0.5724840760231018, |
|
"learning_rate": 7.972050314208934e-06, |
|
"loss": 1.0058, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.6455696202531646, |
|
"grad_norm": 0.7222654819488525, |
|
"learning_rate": 7.930614183426074e-06, |
|
"loss": 0.9559, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.6518987341772152, |
|
"grad_norm": 0.646041214466095, |
|
"learning_rate": 7.888869157041257e-06, |
|
"loss": 0.9883, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.6582278481012658, |
|
"grad_norm": 0.6527572274208069, |
|
"learning_rate": 7.846819635106569e-06, |
|
"loss": 1.0445, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.6645569620253164, |
|
"grad_norm": 0.6058819890022278, |
|
"learning_rate": 7.80447004976885e-06, |
|
"loss": 0.955, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.6708860759493671, |
|
"grad_norm": 0.6299486756324768, |
|
"learning_rate": 7.76182486480253e-06, |
|
"loss": 0.9768, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.6772151898734177, |
|
"grad_norm": 0.590530276298523, |
|
"learning_rate": 7.718888575139134e-06, |
|
"loss": 0.9594, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.6835443037974683, |
|
"grad_norm": 0.673035740852356, |
|
"learning_rate": 7.675665706393502e-06, |
|
"loss": 1.0659, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.689873417721519, |
|
"grad_norm": 0.5782584547996521, |
|
"learning_rate": 7.63216081438678e-06, |
|
"loss": 0.9938, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.6962025316455697, |
|
"grad_norm": 0.7264094352722168, |
|
"learning_rate": 7.588378484666214e-06, |
|
"loss": 0.9571, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.7025316455696202, |
|
"grad_norm": 0.6233193874359131, |
|
"learning_rate": 7.544323332021826e-06, |
|
"loss": 0.9843, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.7088607594936709, |
|
"grad_norm": 0.6951065063476562, |
|
"learning_rate": 7.500000000000001e-06, |
|
"loss": 0.9759, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.7151898734177216, |
|
"grad_norm": 0.6585705876350403, |
|
"learning_rate": 7.4554131604140425e-06, |
|
"loss": 1.0337, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.7215189873417721, |
|
"grad_norm": 0.6394013166427612, |
|
"learning_rate": 7.4105675128517456e-06, |
|
"loss": 0.9855, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.7278481012658228, |
|
"grad_norm": 0.6313098073005676, |
|
"learning_rate": 7.365467784180051e-06, |
|
"loss": 0.9929, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.7341772151898734, |
|
"grad_norm": 0.7581190466880798, |
|
"learning_rate": 7.320118728046818e-06, |
|
"loss": 0.9525, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.740506329113924, |
|
"grad_norm": 0.6447349190711975, |
|
"learning_rate": 7.274525124379773e-06, |
|
"loss": 1.057, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.7468354430379747, |
|
"grad_norm": 0.6613853573799133, |
|
"learning_rate": 7.2286917788826926e-06, |
|
"loss": 0.9863, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.7531645569620253, |
|
"grad_norm": 0.6976621150970459, |
|
"learning_rate": 7.182623522528866e-06, |
|
"loss": 1.0098, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.759493670886076, |
|
"grad_norm": 0.6151190996170044, |
|
"learning_rate": 7.136325211051905e-06, |
|
"loss": 0.9927, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.759493670886076, |
|
"eval_loss": 1.0121220350265503, |
|
"eval_runtime": 26.8557, |
|
"eval_samples_per_second": 2.867, |
|
"eval_steps_per_second": 0.372, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.7658227848101266, |
|
"grad_norm": 0.7940335273742676, |
|
"learning_rate": 7.089801724433918e-06, |
|
"loss": 1.0358, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.7721518987341772, |
|
"grad_norm": 0.6850300431251526, |
|
"learning_rate": 7.043057966391158e-06, |
|
"loss": 1.0059, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.7784810126582279, |
|
"grad_norm": 0.6112099885940552, |
|
"learning_rate": 6.996098863857155e-06, |
|
"loss": 0.983, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.7848101265822784, |
|
"grad_norm": 0.6043317317962646, |
|
"learning_rate": 6.948929366463397e-06, |
|
"loss": 1.0636, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.7911392405063291, |
|
"grad_norm": 0.6628001928329468, |
|
"learning_rate": 6.9015544460176296e-06, |
|
"loss": 0.9459, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.7974683544303798, |
|
"grad_norm": 0.748231828212738, |
|
"learning_rate": 6.8539790959798045e-06, |
|
"loss": 1.0215, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.8037974683544303, |
|
"grad_norm": 0.6250873804092407, |
|
"learning_rate": 6.806208330935766e-06, |
|
"loss": 0.9983, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.810126582278481, |
|
"grad_norm": 0.7003646492958069, |
|
"learning_rate": 6.758247186068684e-06, |
|
"loss": 1.0286, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.8164556962025317, |
|
"grad_norm": 0.6818345785140991, |
|
"learning_rate": 6.710100716628345e-06, |
|
"loss": 0.9525, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.8227848101265823, |
|
"grad_norm": 0.7016712427139282, |
|
"learning_rate": 6.6617739973982985e-06, |
|
"loss": 1.0184, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.8291139240506329, |
|
"grad_norm": 0.6990473866462708, |
|
"learning_rate": 6.613272122160975e-06, |
|
"loss": 0.9569, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.8354430379746836, |
|
"grad_norm": 0.6467525959014893, |
|
"learning_rate": 6.5646002031607726e-06, |
|
"loss": 0.9445, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.8417721518987342, |
|
"grad_norm": 0.6543334722518921, |
|
"learning_rate": 6.515763370565218e-06, |
|
"loss": 0.984, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.8481012658227848, |
|
"grad_norm": 0.756028950214386, |
|
"learning_rate": 6.466766771924231e-06, |
|
"loss": 1.0284, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.8544303797468354, |
|
"grad_norm": 0.6072544455528259, |
|
"learning_rate": 6.417615571627555e-06, |
|
"loss": 1.0102, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.8607594936708861, |
|
"grad_norm": 0.7596509456634521, |
|
"learning_rate": 6.368314950360416e-06, |
|
"loss": 1.0202, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.8670886075949367, |
|
"grad_norm": 0.6676567792892456, |
|
"learning_rate": 6.318870104557459e-06, |
|
"loss": 0.9559, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.8734177215189873, |
|
"grad_norm": 0.6714494228363037, |
|
"learning_rate": 6.269286245855039e-06, |
|
"loss": 0.9715, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.879746835443038, |
|
"grad_norm": 0.6048732399940491, |
|
"learning_rate": 6.219568600541886e-06, |
|
"loss": 0.9552, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.8860759493670886, |
|
"grad_norm": 0.619331955909729, |
|
"learning_rate": 6.169722409008244e-06, |
|
"loss": 0.9541, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.8924050632911392, |
|
"grad_norm": 0.7157178521156311, |
|
"learning_rate": 6.119752925193516e-06, |
|
"loss": 1.0113, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.8987341772151899, |
|
"grad_norm": 0.5414648652076721, |
|
"learning_rate": 6.0696654160324875e-06, |
|
"loss": 0.9714, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.9050632911392406, |
|
"grad_norm": 0.627295970916748, |
|
"learning_rate": 6.019465160900173e-06, |
|
"loss": 0.9448, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.9113924050632911, |
|
"grad_norm": 0.6677543520927429, |
|
"learning_rate": 5.9691574510553505e-06, |
|
"loss": 0.9915, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.9177215189873418, |
|
"grad_norm": 0.7285206913948059, |
|
"learning_rate": 5.918747589082853e-06, |
|
"loss": 1.0241, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.9240506329113924, |
|
"grad_norm": 0.638154149055481, |
|
"learning_rate": 5.8682408883346535e-06, |
|
"loss": 0.9622, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.930379746835443, |
|
"grad_norm": 0.6200531125068665, |
|
"learning_rate": 5.817642672369825e-06, |
|
"loss": 0.9043, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.9367088607594937, |
|
"grad_norm": 0.6375910043716431, |
|
"learning_rate": 5.766958274393428e-06, |
|
"loss": 0.9759, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.9430379746835443, |
|
"grad_norm": 0.6902610659599304, |
|
"learning_rate": 5.716193036694359e-06, |
|
"loss": 0.9636, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.9493670886075949, |
|
"grad_norm": 0.6575567126274109, |
|
"learning_rate": 5.66535231008227e-06, |
|
"loss": 0.9375, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.9556962025316456, |
|
"grad_norm": 0.7073491811752319, |
|
"learning_rate": 5.614441453323571e-06, |
|
"loss": 1.0123, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.9620253164556962, |
|
"grad_norm": 0.6990174055099487, |
|
"learning_rate": 5.5634658325766066e-06, |
|
"loss": 0.9554, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.9683544303797469, |
|
"grad_norm": 0.7610283493995667, |
|
"learning_rate": 5.512430820826035e-06, |
|
"loss": 0.9396, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.9746835443037974, |
|
"grad_norm": 0.6636858582496643, |
|
"learning_rate": 5.46134179731651e-06, |
|
"loss": 0.9572, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.9810126582278481, |
|
"grad_norm": 0.6344922780990601, |
|
"learning_rate": 5.41020414698569e-06, |
|
"loss": 0.936, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.9873417721518988, |
|
"grad_norm": 0.707417905330658, |
|
"learning_rate": 5.359023259896638e-06, |
|
"loss": 0.9892, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.9936708860759493, |
|
"grad_norm": 0.7896108627319336, |
|
"learning_rate": 5.3078045306697154e-06, |
|
"loss": 1.0095, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.7199612855911255, |
|
"learning_rate": 5.2565533579139484e-06, |
|
"loss": 0.9876, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 1.0063291139240507, |
|
"grad_norm": 0.7145763635635376, |
|
"learning_rate": 5.205275143658018e-06, |
|
"loss": 0.9937, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 1.0126582278481013, |
|
"grad_norm": 0.6825771927833557, |
|
"learning_rate": 5.153975292780852e-06, |
|
"loss": 0.9546, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 1.0126582278481013, |
|
"eval_loss": 0.9962683320045471, |
|
"eval_runtime": 27.1734, |
|
"eval_samples_per_second": 2.834, |
|
"eval_steps_per_second": 0.368, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 1.018987341772152, |
|
"grad_norm": 0.6719457507133484, |
|
"learning_rate": 5.102659212441953e-06, |
|
"loss": 1.0176, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 1.0253164556962024, |
|
"grad_norm": 0.7371258735656738, |
|
"learning_rate": 5.05133231151145e-06, |
|
"loss": 1.0144, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 1.0063291139240507, |
|
"grad_norm": 0.8656511902809143, |
|
"learning_rate": 5e-06, |
|
"loss": 0.9327, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 1.0126582278481013, |
|
"grad_norm": 0.8384556174278259, |
|
"learning_rate": 4.948667688488552e-06, |
|
"loss": 0.9383, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 1.018987341772152, |
|
"grad_norm": 0.7570232152938843, |
|
"learning_rate": 4.8973407875580485e-06, |
|
"loss": 0.9649, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 1.0253164556962024, |
|
"grad_norm": 0.7129900455474854, |
|
"learning_rate": 4.846024707219149e-06, |
|
"loss": 1.0226, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 1.0316455696202531, |
|
"grad_norm": 0.6944881081581116, |
|
"learning_rate": 4.794724856341985e-06, |
|
"loss": 0.9879, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 1.0379746835443038, |
|
"grad_norm": 0.6823258996009827, |
|
"learning_rate": 4.7434466420860515e-06, |
|
"loss": 0.9471, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 1.0443037974683544, |
|
"grad_norm": 0.6720079183578491, |
|
"learning_rate": 4.692195469330286e-06, |
|
"loss": 0.9464, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 1.0506329113924051, |
|
"grad_norm": 0.7437319755554199, |
|
"learning_rate": 4.640976740103363e-06, |
|
"loss": 1.0245, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 1.0569620253164558, |
|
"grad_norm": 0.7405062317848206, |
|
"learning_rate": 4.589795853014313e-06, |
|
"loss": 0.9653, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 1.0632911392405062, |
|
"grad_norm": 0.7187413573265076, |
|
"learning_rate": 4.53865820268349e-06, |
|
"loss": 1.0003, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 1.0696202531645569, |
|
"grad_norm": 0.7061622142791748, |
|
"learning_rate": 4.4875691791739655e-06, |
|
"loss": 0.9287, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 1.0759493670886076, |
|
"grad_norm": 0.652839183807373, |
|
"learning_rate": 4.436534167423395e-06, |
|
"loss": 0.9044, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 1.0822784810126582, |
|
"grad_norm": 0.7338419556617737, |
|
"learning_rate": 4.3855585466764305e-06, |
|
"loss": 0.949, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 1.0886075949367089, |
|
"grad_norm": 0.6877685189247131, |
|
"learning_rate": 4.334647689917734e-06, |
|
"loss": 0.9097, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 1.0949367088607596, |
|
"grad_norm": 0.7495954036712646, |
|
"learning_rate": 4.283806963305644e-06, |
|
"loss": 0.9667, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 1.1012658227848102, |
|
"grad_norm": 0.8571280241012573, |
|
"learning_rate": 4.233041725606573e-06, |
|
"loss": 0.9716, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 1.1075949367088607, |
|
"grad_norm": 0.7294744849205017, |
|
"learning_rate": 4.182357327630175e-06, |
|
"loss": 0.962, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 1.1139240506329113, |
|
"grad_norm": 0.6910297870635986, |
|
"learning_rate": 4.131759111665349e-06, |
|
"loss": 0.9634, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 1.120253164556962, |
|
"grad_norm": 0.7858085036277771, |
|
"learning_rate": 4.081252410917148e-06, |
|
"loss": 1.0137, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 1.1265822784810127, |
|
"grad_norm": 0.6981230974197388, |
|
"learning_rate": 4.03084254894465e-06, |
|
"loss": 1.005, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 1.1329113924050633, |
|
"grad_norm": 0.7483316659927368, |
|
"learning_rate": 3.980534839099829e-06, |
|
"loss": 1.0108, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 1.139240506329114, |
|
"grad_norm": 0.8037570118904114, |
|
"learning_rate": 3.930334583967514e-06, |
|
"loss": 1.048, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 1.1455696202531644, |
|
"grad_norm": 0.686886727809906, |
|
"learning_rate": 3.8802470748064855e-06, |
|
"loss": 0.9364, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 1.1518987341772151, |
|
"grad_norm": 0.6869245171546936, |
|
"learning_rate": 3.8302775909917585e-06, |
|
"loss": 1.0346, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 1.1582278481012658, |
|
"grad_norm": 0.6785135269165039, |
|
"learning_rate": 3.7804313994581143e-06, |
|
"loss": 0.9497, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 1.1645569620253164, |
|
"grad_norm": 0.6726052165031433, |
|
"learning_rate": 3.730713754144961e-06, |
|
"loss": 0.87, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 1.1708860759493671, |
|
"grad_norm": 0.7181726694107056, |
|
"learning_rate": 3.68112989544254e-06, |
|
"loss": 0.9707, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 1.1772151898734178, |
|
"grad_norm": 0.6745687127113342, |
|
"learning_rate": 3.6316850496395863e-06, |
|
"loss": 0.8986, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 1.1835443037974684, |
|
"grad_norm": 0.6786714792251587, |
|
"learning_rate": 3.5823844283724464e-06, |
|
"loss": 0.8992, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 1.189873417721519, |
|
"grad_norm": 0.7817081809043884, |
|
"learning_rate": 3.5332332280757706e-06, |
|
"loss": 0.9248, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 1.1962025316455696, |
|
"grad_norm": 0.6921507716178894, |
|
"learning_rate": 3.484236629434783e-06, |
|
"loss": 0.9446, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 1.2025316455696202, |
|
"grad_norm": 0.7437227368354797, |
|
"learning_rate": 3.4353997968392295e-06, |
|
"loss": 0.9208, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 1.2088607594936709, |
|
"grad_norm": 0.7182348370552063, |
|
"learning_rate": 3.386727877839027e-06, |
|
"loss": 0.9494, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 1.2151898734177216, |
|
"grad_norm": 0.7869457006454468, |
|
"learning_rate": 3.3382260026017027e-06, |
|
"loss": 0.9336, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 1.2215189873417722, |
|
"grad_norm": 0.645630955696106, |
|
"learning_rate": 3.289899283371657e-06, |
|
"loss": 0.8883, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 1.2278481012658227, |
|
"grad_norm": 0.6819594502449036, |
|
"learning_rate": 3.241752813931316e-06, |
|
"loss": 0.9696, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 1.2341772151898733, |
|
"grad_norm": 0.7497106790542603, |
|
"learning_rate": 3.1937916690642356e-06, |
|
"loss": 0.9223, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 1.240506329113924, |
|
"grad_norm": 0.6808606386184692, |
|
"learning_rate": 3.1460209040201967e-06, |
|
"loss": 0.9469, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 1.240506329113924, |
|
"eval_loss": 0.9852771759033203, |
|
"eval_runtime": 26.8649, |
|
"eval_samples_per_second": 2.866, |
|
"eval_steps_per_second": 0.372, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 1.2468354430379747, |
|
"grad_norm": 0.7417311072349548, |
|
"learning_rate": 3.098445553982372e-06, |
|
"loss": 0.9138, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 1.2531645569620253, |
|
"grad_norm": 0.695534348487854, |
|
"learning_rate": 3.0510706335366034e-06, |
|
"loss": 0.9581, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 1.259493670886076, |
|
"grad_norm": 0.718514084815979, |
|
"learning_rate": 3.0039011361428466e-06, |
|
"loss": 0.9887, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 1.2658227848101267, |
|
"grad_norm": 0.7240175604820251, |
|
"learning_rate": 2.956942033608843e-06, |
|
"loss": 0.9678, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 1.2721518987341773, |
|
"grad_norm": 0.7439044713973999, |
|
"learning_rate": 2.910198275566085e-06, |
|
"loss": 1.0285, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 1.2784810126582278, |
|
"grad_norm": 0.6482222676277161, |
|
"learning_rate": 2.863674788948097e-06, |
|
"loss": 0.841, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 1.2848101265822784, |
|
"grad_norm": 0.6140475273132324, |
|
"learning_rate": 2.817376477471132e-06, |
|
"loss": 0.9727, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 1.2911392405063291, |
|
"grad_norm": 0.6975511312484741, |
|
"learning_rate": 2.771308221117309e-06, |
|
"loss": 0.966, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 1.2974683544303798, |
|
"grad_norm": 0.798649787902832, |
|
"learning_rate": 2.725474875620228e-06, |
|
"loss": 0.9705, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 1.3037974683544304, |
|
"grad_norm": 0.737694501876831, |
|
"learning_rate": 2.6798812719531843e-06, |
|
"loss": 0.9599, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 1.310126582278481, |
|
"grad_norm": 0.6566654443740845, |
|
"learning_rate": 2.6345322158199503e-06, |
|
"loss": 0.9911, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 1.3164556962025316, |
|
"grad_norm": 0.8401856422424316, |
|
"learning_rate": 2.5894324871482557e-06, |
|
"loss": 0.95, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 1.3227848101265822, |
|
"grad_norm": 0.7995955944061279, |
|
"learning_rate": 2.544586839585961e-06, |
|
"loss": 0.9634, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 1.3291139240506329, |
|
"grad_norm": 0.7287630438804626, |
|
"learning_rate": 2.5000000000000015e-06, |
|
"loss": 0.9427, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 1.3354430379746836, |
|
"grad_norm": 0.7197180390357971, |
|
"learning_rate": 2.4556766679781763e-06, |
|
"loss": 0.9687, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 1.3417721518987342, |
|
"grad_norm": 0.6508699059486389, |
|
"learning_rate": 2.411621515333788e-06, |
|
"loss": 0.8793, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 1.3481012658227849, |
|
"grad_norm": 0.7081306576728821, |
|
"learning_rate": 2.3678391856132203e-06, |
|
"loss": 0.9795, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 1.3544303797468356, |
|
"grad_norm": 0.8779144287109375, |
|
"learning_rate": 2.324334293606499e-06, |
|
"loss": 0.9973, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 1.360759493670886, |
|
"grad_norm": 0.7999278903007507, |
|
"learning_rate": 2.2811114248608675e-06, |
|
"loss": 0.9019, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 1.3670886075949367, |
|
"grad_norm": 0.710173487663269, |
|
"learning_rate": 2.238175135197471e-06, |
|
"loss": 0.9279, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 1.3734177215189873, |
|
"grad_norm": 0.7713471055030823, |
|
"learning_rate": 2.1955299502311523e-06, |
|
"loss": 0.969, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 1.379746835443038, |
|
"grad_norm": 0.6300449371337891, |
|
"learning_rate": 2.1531803648934333e-06, |
|
"loss": 0.8882, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 1.3860759493670887, |
|
"grad_norm": 0.7475897669792175, |
|
"learning_rate": 2.1111308429587446e-06, |
|
"loss": 0.9713, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 1.3924050632911391, |
|
"grad_norm": 0.6781705021858215, |
|
"learning_rate": 2.069385816573928e-06, |
|
"loss": 0.8796, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 1.3987341772151898, |
|
"grad_norm": 0.7179297208786011, |
|
"learning_rate": 2.0279496857910667e-06, |
|
"loss": 0.9812, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 1.4050632911392404, |
|
"grad_norm": 0.7066859602928162, |
|
"learning_rate": 1.9868268181037186e-06, |
|
"loss": 1.0138, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 1.4113924050632911, |
|
"grad_norm": 0.705045759677887, |
|
"learning_rate": 1.9460215479865613e-06, |
|
"loss": 1.0038, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 1.4177215189873418, |
|
"grad_norm": 0.6907618045806885, |
|
"learning_rate": 1.9055381764385272e-06, |
|
"loss": 0.9559, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 1.4240506329113924, |
|
"grad_norm": 0.6223387718200684, |
|
"learning_rate": 1.865380970529469e-06, |
|
"loss": 0.9316, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 1.4303797468354431, |
|
"grad_norm": 0.7444384694099426, |
|
"learning_rate": 1.8255541629503865e-06, |
|
"loss": 0.9734, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 1.4367088607594938, |
|
"grad_norm": 0.7602571845054626, |
|
"learning_rate": 1.7860619515673034e-06, |
|
"loss": 0.9567, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 1.4430379746835442, |
|
"grad_norm": 0.7953392863273621, |
|
"learning_rate": 1.746908498978791e-06, |
|
"loss": 1.0166, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 1.4493670886075949, |
|
"grad_norm": 0.7110669612884521, |
|
"learning_rate": 1.708097932077213e-06, |
|
"loss": 0.9717, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 1.4556962025316456, |
|
"grad_norm": 0.8688918352127075, |
|
"learning_rate": 1.6696343416137495e-06, |
|
"loss": 0.9629, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 1.4620253164556962, |
|
"grad_norm": 0.7148693799972534, |
|
"learning_rate": 1.6315217817672142e-06, |
|
"loss": 0.88, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 1.4683544303797469, |
|
"grad_norm": 0.6728506684303284, |
|
"learning_rate": 1.5937642697167288e-06, |
|
"loss": 0.9788, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 1.4746835443037973, |
|
"grad_norm": 0.6786729693412781, |
|
"learning_rate": 1.5563657852183072e-06, |
|
"loss": 0.9725, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 1.481012658227848, |
|
"grad_norm": 0.8744902014732361, |
|
"learning_rate": 1.5193302701853674e-06, |
|
"loss": 0.9251, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 1.4873417721518987, |
|
"grad_norm": 0.7750695943832397, |
|
"learning_rate": 1.4826616282732509e-06, |
|
"loss": 0.982, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 1.4936708860759493, |
|
"grad_norm": 0.8532109260559082, |
|
"learning_rate": 1.4463637244677648e-06, |
|
"loss": 0.981, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 1.4936708860759493, |
|
"eval_loss": 0.9807116985321045, |
|
"eval_runtime": 27.0659, |
|
"eval_samples_per_second": 2.845, |
|
"eval_steps_per_second": 0.369, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 0.6917183995246887, |
|
"learning_rate": 1.410440384677791e-06, |
|
"loss": 0.8644, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 1.5063291139240507, |
|
"grad_norm": 0.7056766748428345, |
|
"learning_rate": 1.374895395332037e-06, |
|
"loss": 0.8807, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 1.5126582278481013, |
|
"grad_norm": 0.6405808329582214, |
|
"learning_rate": 1.339732502979928e-06, |
|
"loss": 0.9948, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 1.518987341772152, |
|
"grad_norm": 0.7194777727127075, |
|
"learning_rate": 1.3049554138967052e-06, |
|
"loss": 0.9717, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 1.5253164556962027, |
|
"grad_norm": 0.7285950779914856, |
|
"learning_rate": 1.2705677936927841e-06, |
|
"loss": 0.8864, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 1.5316455696202531, |
|
"grad_norm": 0.7395027279853821, |
|
"learning_rate": 1.2365732669273778e-06, |
|
"loss": 0.9594, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 1.5379746835443038, |
|
"grad_norm": 0.7932447791099548, |
|
"learning_rate": 1.202975416726464e-06, |
|
"loss": 0.9798, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 1.5443037974683544, |
|
"grad_norm": 0.7805992960929871, |
|
"learning_rate": 1.1697777844051105e-06, |
|
"loss": 0.9369, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 1.5506329113924051, |
|
"grad_norm": 0.7040771842002869, |
|
"learning_rate": 1.1369838690942059e-06, |
|
"loss": 0.9634, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 1.5569620253164556, |
|
"grad_norm": 0.876220166683197, |
|
"learning_rate": 1.1045971273716476e-06, |
|
"loss": 1.0477, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 1.5632911392405062, |
|
"grad_norm": 0.7199251055717468, |
|
"learning_rate": 1.072620972898007e-06, |
|
"loss": 0.9679, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 1.5696202531645569, |
|
"grad_norm": 0.6955793499946594, |
|
"learning_rate": 1.0410587760567104e-06, |
|
"loss": 1.0017, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 1.5759493670886076, |
|
"grad_norm": 0.7453635334968567, |
|
"learning_rate": 1.0099138635988026e-06, |
|
"loss": 0.8877, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 1.5822784810126582, |
|
"grad_norm": 0.7684614658355713, |
|
"learning_rate": 9.791895182922911e-07, |
|
"loss": 0.9563, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 1.5886075949367089, |
|
"grad_norm": 0.7982029318809509, |
|
"learning_rate": 9.488889785761324e-07, |
|
"loss": 1.0245, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 1.5949367088607596, |
|
"grad_norm": 0.642513632774353, |
|
"learning_rate": 9.190154382188921e-07, |
|
"loss": 0.8915, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 1.6012658227848102, |
|
"grad_norm": 0.7223451733589172, |
|
"learning_rate": 8.895720459821089e-07, |
|
"loss": 0.9065, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 1.6075949367088609, |
|
"grad_norm": 0.7411177754402161, |
|
"learning_rate": 8.605619052884106e-07, |
|
"loss": 0.9737, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 1.6139240506329116, |
|
"grad_norm": 0.6289587020874023, |
|
"learning_rate": 8.31988073894403e-07, |
|
"loss": 0.9122, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 1.620253164556962, |
|
"grad_norm": 0.8470100164413452, |
|
"learning_rate": 8.03853563568367e-07, |
|
"loss": 0.9584, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 1.6265822784810127, |
|
"grad_norm": 0.7927916646003723, |
|
"learning_rate": 7.761613397728174e-07, |
|
"loss": 0.9865, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 1.6329113924050633, |
|
"grad_norm": 0.7874268889427185, |
|
"learning_rate": 7.489143213519301e-07, |
|
"loss": 0.9629, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 1.6392405063291138, |
|
"grad_norm": 0.7497284412384033, |
|
"learning_rate": 7.221153802238845e-07, |
|
"loss": 1.0514, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 1.6455696202531644, |
|
"grad_norm": 0.8058854341506958, |
|
"learning_rate": 6.957673410781617e-07, |
|
"loss": 0.9033, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 1.6518987341772151, |
|
"grad_norm": 0.6752853989601135, |
|
"learning_rate": 6.698729810778065e-07, |
|
"loss": 0.9259, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 1.6582278481012658, |
|
"grad_norm": 0.673598051071167, |
|
"learning_rate": 6.444350295667112e-07, |
|
"loss": 0.8831, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 1.6645569620253164, |
|
"grad_norm": 0.68260258436203, |
|
"learning_rate": 6.194561677819327e-07, |
|
"loss": 0.9342, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 1.6708860759493671, |
|
"grad_norm": 0.7135693430900574, |
|
"learning_rate": 5.949390285710777e-07, |
|
"loss": 0.9092, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 1.6772151898734178, |
|
"grad_norm": 0.8336507678031921, |
|
"learning_rate": 5.708861961148004e-07, |
|
"loss": 0.9273, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 1.6835443037974684, |
|
"grad_norm": 0.7212085127830505, |
|
"learning_rate": 5.473002056544191e-07, |
|
"loss": 0.9458, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 1.689873417721519, |
|
"grad_norm": 0.8677213191986084, |
|
"learning_rate": 5.241835432246888e-07, |
|
"loss": 0.9082, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 1.6962025316455698, |
|
"grad_norm": 0.7418637275695801, |
|
"learning_rate": 5.015386453917742e-07, |
|
"loss": 0.9136, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 1.7025316455696202, |
|
"grad_norm": 0.7381477952003479, |
|
"learning_rate": 4.793678989964207e-07, |
|
"loss": 0.9842, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 1.7088607594936709, |
|
"grad_norm": 0.6637036800384521, |
|
"learning_rate": 4.576736409023813e-07, |
|
"loss": 0.8811, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 1.7151898734177216, |
|
"grad_norm": 0.7035127878189087, |
|
"learning_rate": 4.364581577500987e-07, |
|
"loss": 0.8982, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 1.721518987341772, |
|
"grad_norm": 0.7062835693359375, |
|
"learning_rate": 4.15723685715686e-07, |
|
"loss": 1.0026, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 1.7278481012658227, |
|
"grad_norm": 0.7588692307472229, |
|
"learning_rate": 3.9547241027523164e-07, |
|
"loss": 0.951, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 1.7341772151898733, |
|
"grad_norm": 0.825013279914856, |
|
"learning_rate": 3.7570646597444196e-07, |
|
"loss": 1.0033, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 1.740506329113924, |
|
"grad_norm": 0.7376324534416199, |
|
"learning_rate": 3.564279362036488e-07, |
|
"loss": 0.9115, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 1.7468354430379747, |
|
"grad_norm": 0.7513666152954102, |
|
"learning_rate": 3.3763885297822153e-07, |
|
"loss": 0.953, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 1.7468354430379747, |
|
"eval_loss": 0.9792006015777588, |
|
"eval_runtime": 26.9932, |
|
"eval_samples_per_second": 2.853, |
|
"eval_steps_per_second": 0.37, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 1.7531645569620253, |
|
"grad_norm": 0.6882026195526123, |
|
"learning_rate": 3.1934119672438093e-07, |
|
"loss": 0.9758, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 1.759493670886076, |
|
"grad_norm": 0.6910998225212097, |
|
"learning_rate": 3.015368960704584e-07, |
|
"loss": 0.9528, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 1.7658227848101267, |
|
"grad_norm": 0.7122961282730103, |
|
"learning_rate": 2.842278276436128e-07, |
|
"loss": 0.9807, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 1.7721518987341773, |
|
"grad_norm": 0.7146971225738525, |
|
"learning_rate": 2.6741581587202747e-07, |
|
"loss": 0.9552, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 1.778481012658228, |
|
"grad_norm": 0.8139782547950745, |
|
"learning_rate": 2.511026327926114e-07, |
|
"loss": 0.8775, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 1.7848101265822784, |
|
"grad_norm": 0.7478958368301392, |
|
"learning_rate": 2.3528999786421758e-07, |
|
"loss": 0.9233, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 1.7911392405063291, |
|
"grad_norm": 0.7222813963890076, |
|
"learning_rate": 2.1997957778641166e-07, |
|
"loss": 0.971, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 1.7974683544303798, |
|
"grad_norm": 0.8937734961509705, |
|
"learning_rate": 2.0517298632379445e-07, |
|
"loss": 0.9891, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 1.8037974683544302, |
|
"grad_norm": 0.6277683973312378, |
|
"learning_rate": 1.908717841359048e-07, |
|
"loss": 0.9439, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 1.810126582278481, |
|
"grad_norm": 0.763795018196106, |
|
"learning_rate": 1.770774786127244e-07, |
|
"loss": 1.029, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 1.8164556962025316, |
|
"grad_norm": 0.7020488381385803, |
|
"learning_rate": 1.6379152371579277e-07, |
|
"loss": 0.9442, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 1.8227848101265822, |
|
"grad_norm": 0.7853723764419556, |
|
"learning_rate": 1.510153198249531e-07, |
|
"loss": 0.9591, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 1.8291139240506329, |
|
"grad_norm": 0.6295353174209595, |
|
"learning_rate": 1.3875021359075257e-07, |
|
"loss": 0.919, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 1.8354430379746836, |
|
"grad_norm": 0.7198224067687988, |
|
"learning_rate": 1.2699749779249926e-07, |
|
"loss": 0.9191, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 1.8417721518987342, |
|
"grad_norm": 0.7016299366950989, |
|
"learning_rate": 1.157584112019966e-07, |
|
"loss": 0.9477, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 1.8481012658227849, |
|
"grad_norm": 0.6811798810958862, |
|
"learning_rate": 1.0503413845297739e-07, |
|
"loss": 0.9135, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 1.8544303797468356, |
|
"grad_norm": 0.7357584834098816, |
|
"learning_rate": 9.482580991623747e-08, |
|
"loss": 0.9744, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 1.8607594936708862, |
|
"grad_norm": 0.7015813589096069, |
|
"learning_rate": 8.513450158049109e-08, |
|
"loss": 0.9635, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 1.8670886075949367, |
|
"grad_norm": 0.6997948288917542, |
|
"learning_rate": 7.59612349389599e-08, |
|
"loss": 0.9032, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 1.8734177215189873, |
|
"grad_norm": 0.8062969446182251, |
|
"learning_rate": 6.730697688170251e-08, |
|
"loss": 0.8912, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 1.879746835443038, |
|
"grad_norm": 0.7720515131950378, |
|
"learning_rate": 5.917263959370312e-08, |
|
"loss": 0.9979, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 1.8860759493670884, |
|
"grad_norm": 0.6396417021751404, |
|
"learning_rate": 5.155908045872349e-08, |
|
"loss": 0.9183, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 1.8924050632911391, |
|
"grad_norm": 0.6847640872001648, |
|
"learning_rate": 4.446710196893245e-08, |
|
"loss": 0.9314, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 1.8987341772151898, |
|
"grad_norm": 0.7423564791679382, |
|
"learning_rate": 3.7897451640321326e-08, |
|
"loss": 0.9059, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 1.9050632911392404, |
|
"grad_norm": 0.785251796245575, |
|
"learning_rate": 3.185082193391143e-08, |
|
"loss": 0.9114, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 1.9113924050632911, |
|
"grad_norm": 0.6015980243682861, |
|
"learning_rate": 2.6327850182769065e-08, |
|
"loss": 0.8995, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 1.9177215189873418, |
|
"grad_norm": 0.713444709777832, |
|
"learning_rate": 2.1329118524827662e-08, |
|
"loss": 0.9508, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 1.9240506329113924, |
|
"grad_norm": 0.73280930519104, |
|
"learning_rate": 1.6855153841527915e-08, |
|
"loss": 0.8864, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 1.9303797468354431, |
|
"grad_norm": 0.7004281878471375, |
|
"learning_rate": 1.2906427702284452e-08, |
|
"loss": 0.8865, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 1.9367088607594938, |
|
"grad_norm": 0.7322970032691956, |
|
"learning_rate": 9.48335631477948e-09, |
|
"loss": 1.0077, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 1.9430379746835444, |
|
"grad_norm": 0.7232335209846497, |
|
"learning_rate": 6.586300481095098e-09, |
|
"loss": 0.9158, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 1.9493670886075949, |
|
"grad_norm": 0.9513888955116272, |
|
"learning_rate": 4.2155655596809455e-09, |
|
"loss": 0.9854, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 1.9556962025316456, |
|
"grad_norm": 0.7984898090362549, |
|
"learning_rate": 2.371401433170495e-09, |
|
"loss": 0.8758, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 1.9620253164556962, |
|
"grad_norm": 0.7864643931388855, |
|
"learning_rate": 1.054002482043237e-09, |
|
"loss": 0.8773, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 1.9683544303797469, |
|
"grad_norm": 0.8517611622810364, |
|
"learning_rate": 2.6350756413440203e-10, |
|
"loss": 0.9853, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 1.9746835443037973, |
|
"grad_norm": 0.8657993674278259, |
|
"learning_rate": 0.0, |
|
"loss": 0.9612, |
|
"step": 316 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 316, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 79, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.748009197213057e+18, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |