{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.0331262939958592,
"eval_steps": 500,
"global_step": 250,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.004140786749482402,
"grad_norm": 0.49736908078193665,
"learning_rate": 2.4000000000000003e-06,
"loss": 2.1632,
"step": 1
},
{
"epoch": 0.008281573498964804,
"grad_norm": 0.4018385708332062,
"learning_rate": 4.800000000000001e-06,
"loss": 2.1448,
"step": 2
},
{
"epoch": 0.012422360248447204,
"grad_norm": 0.48689907789230347,
"learning_rate": 7.2e-06,
"loss": 2.3454,
"step": 3
},
{
"epoch": 0.016563146997929608,
"grad_norm": 0.5226960182189941,
"learning_rate": 9.600000000000001e-06,
"loss": 2.0872,
"step": 4
},
{
"epoch": 0.020703933747412008,
"grad_norm": 0.44197356700897217,
"learning_rate": 1.2e-05,
"loss": 2.0461,
"step": 5
},
{
"epoch": 0.024844720496894408,
"grad_norm": 0.3927185833454132,
"learning_rate": 1.44e-05,
"loss": 2.0965,
"step": 6
},
{
"epoch": 0.028985507246376812,
"grad_norm": 0.37648865580558777,
"learning_rate": 1.6800000000000002e-05,
"loss": 1.7218,
"step": 7
},
{
"epoch": 0.033126293995859216,
"grad_norm": 0.14497901499271393,
"learning_rate": 1.9200000000000003e-05,
"loss": 1.8762,
"step": 8
},
{
"epoch": 0.037267080745341616,
"grad_norm": 0.07990705966949463,
"learning_rate": 2.16e-05,
"loss": 1.8117,
"step": 9
},
{
"epoch": 0.041407867494824016,
"grad_norm": 0.08823492377996445,
"learning_rate": 2.4e-05,
"loss": 1.9386,
"step": 10
},
{
"epoch": 0.045548654244306416,
"grad_norm": 0.11864794045686722,
"learning_rate": 2.64e-05,
"loss": 1.7931,
"step": 11
},
{
"epoch": 0.049689440993788817,
"grad_norm": 0.23525194823741913,
"learning_rate": 2.88e-05,
"loss": 1.8439,
"step": 12
},
{
"epoch": 0.053830227743271224,
"grad_norm": 0.16807658970355988,
"learning_rate": 3.12e-05,
"loss": 2.0359,
"step": 13
},
{
"epoch": 0.057971014492753624,
"grad_norm": 0.15336070954799652,
"learning_rate": 3.3600000000000004e-05,
"loss": 1.9366,
"step": 14
},
{
"epoch": 0.062111801242236024,
"grad_norm": 0.18158744275569916,
"learning_rate": 3.6e-05,
"loss": 1.8926,
"step": 15
},
{
"epoch": 0.06625258799171843,
"grad_norm": 0.12008249014616013,
"learning_rate": 3.8400000000000005e-05,
"loss": 1.909,
"step": 16
},
{
"epoch": 0.07039337474120083,
"grad_norm": 0.211439311504364,
"learning_rate": 4.08e-05,
"loss": 1.8392,
"step": 17
},
{
"epoch": 0.07453416149068323,
"grad_norm": 0.173500657081604,
"learning_rate": 4.32e-05,
"loss": 1.7849,
"step": 18
},
{
"epoch": 0.07867494824016563,
"grad_norm": 0.10626032948493958,
"learning_rate": 4.5600000000000004e-05,
"loss": 1.9013,
"step": 19
},
{
"epoch": 0.08281573498964803,
"grad_norm": 0.13957469165325165,
"learning_rate": 4.8e-05,
"loss": 1.5716,
"step": 20
},
{
"epoch": 0.08695652173913043,
"grad_norm": 0.08517848700284958,
"learning_rate": 5.04e-05,
"loss": 2.0001,
"step": 21
},
{
"epoch": 0.09109730848861283,
"grad_norm": 0.0832364410161972,
"learning_rate": 5.28e-05,
"loss": 2.0204,
"step": 22
},
{
"epoch": 0.09523809523809523,
"grad_norm": 0.09407418221235275,
"learning_rate": 5.520000000000001e-05,
"loss": 1.9757,
"step": 23
},
{
"epoch": 0.09937888198757763,
"grad_norm": 0.10470325499773026,
"learning_rate": 5.76e-05,
"loss": 1.9063,
"step": 24
},
{
"epoch": 0.10351966873706005,
"grad_norm": 0.07362315058708191,
"learning_rate": 6e-05,
"loss": 1.9674,
"step": 25
},
{
"epoch": 0.10766045548654245,
"grad_norm": 0.07604512572288513,
"learning_rate": 5.999929114583739e-05,
"loss": 1.9792,
"step": 26
},
{
"epoch": 0.11180124223602485,
"grad_norm": 0.08372924476861954,
"learning_rate": 5.999716461684787e-05,
"loss": 1.7242,
"step": 27
},
{
"epoch": 0.11594202898550725,
"grad_norm": 0.05084725096821785,
"learning_rate": 5.9993620513524664e-05,
"loss": 1.8951,
"step": 28
},
{
"epoch": 0.12008281573498965,
"grad_norm": 0.06014450266957283,
"learning_rate": 5.99886590033513e-05,
"loss": 1.9171,
"step": 29
},
{
"epoch": 0.12422360248447205,
"grad_norm": 0.06821330636739731,
"learning_rate": 5.9982280320793565e-05,
"loss": 1.8141,
"step": 30
},
{
"epoch": 0.12836438923395446,
"grad_norm": 0.07801458984613419,
"learning_rate": 5.997448476728852e-05,
"loss": 1.8132,
"step": 31
},
{
"epoch": 0.13250517598343686,
"grad_norm": 0.05670145899057388,
"learning_rate": 5.9965272711230184e-05,
"loss": 2.0554,
"step": 32
},
{
"epoch": 0.13664596273291926,
"grad_norm": 0.042144160717725754,
"learning_rate": 5.995464458795218e-05,
"loss": 1.8347,
"step": 33
},
{
"epoch": 0.14078674948240166,
"grad_norm": 0.0534859336912632,
"learning_rate": 5.994260089970716e-05,
"loss": 1.7916,
"step": 34
},
{
"epoch": 0.14492753623188406,
"grad_norm": 0.06442283093929291,
"learning_rate": 5.9929142215643e-05,
"loss": 1.9183,
"step": 35
},
{
"epoch": 0.14906832298136646,
"grad_norm": 0.06278888881206512,
"learning_rate": 5.991426917177598e-05,
"loss": 1.728,
"step": 36
},
{
"epoch": 0.15320910973084886,
"grad_norm": 0.09182301163673401,
"learning_rate": 5.989798247096073e-05,
"loss": 1.6317,
"step": 37
},
{
"epoch": 0.15734989648033126,
"grad_norm": 0.10667438805103302,
"learning_rate": 5.988028288285694e-05,
"loss": 1.9076,
"step": 38
},
{
"epoch": 0.16149068322981366,
"grad_norm": 0.0852225124835968,
"learning_rate": 5.9861171243893064e-05,
"loss": 1.8946,
"step": 39
},
{
"epoch": 0.16563146997929606,
"grad_norm": 0.07862062007188797,
"learning_rate": 5.984064845722676e-05,
"loss": 1.8459,
"step": 40
},
{
"epoch": 0.16977225672877846,
"grad_norm": 0.07918703556060791,
"learning_rate": 5.98187154927022e-05,
"loss": 1.9028,
"step": 41
},
{
"epoch": 0.17391304347826086,
"grad_norm": 0.06476736068725586,
"learning_rate": 5.9795373386804276e-05,
"loss": 1.9157,
"step": 42
},
{
"epoch": 0.17805383022774326,
"grad_norm": 0.06835038214921951,
"learning_rate": 5.977062324260958e-05,
"loss": 1.8086,
"step": 43
},
{
"epoch": 0.18219461697722567,
"grad_norm": 0.0643422082066536,
"learning_rate": 5.974446622973429e-05,
"loss": 1.9354,
"step": 44
},
{
"epoch": 0.18633540372670807,
"grad_norm": 0.08603407442569733,
"learning_rate": 5.9716903584278915e-05,
"loss": 1.9143,
"step": 45
},
{
"epoch": 0.19047619047619047,
"grad_norm": 0.04672611877322197,
"learning_rate": 5.9687936608769837e-05,
"loss": 1.7765,
"step": 46
},
{
"epoch": 0.19461697722567287,
"grad_norm": 0.04983381927013397,
"learning_rate": 5.965756667209781e-05,
"loss": 1.9998,
"step": 47
},
{
"epoch": 0.19875776397515527,
"grad_norm": 0.04628973454236984,
"learning_rate": 5.962579520945323e-05,
"loss": 1.8665,
"step": 48
},
{
"epoch": 0.2028985507246377,
"grad_norm": 0.0615781731903553,
"learning_rate": 5.959262372225834e-05,
"loss": 1.8574,
"step": 49
},
{
"epoch": 0.2070393374741201,
"grad_norm": 0.06260473281145096,
"learning_rate": 5.955805377809627e-05,
"loss": 1.9896,
"step": 50
},
{
"epoch": 0.2111801242236025,
"grad_norm": 0.04908996820449829,
"learning_rate": 5.952208701063691e-05,
"loss": 1.8685,
"step": 51
},
{
"epoch": 0.2153209109730849,
"grad_norm": 0.0652541071176529,
"learning_rate": 5.94847251195598e-05,
"loss": 1.9012,
"step": 52
},
{
"epoch": 0.2194616977225673,
"grad_norm": 0.05220969021320343,
"learning_rate": 5.9445969870473745e-05,
"loss": 1.7153,
"step": 53
},
{
"epoch": 0.2236024844720497,
"grad_norm": 0.05003441497683525,
"learning_rate": 5.940582309483338e-05,
"loss": 2.0243,
"step": 54
},
{
"epoch": 0.2277432712215321,
"grad_norm": 0.04857470095157623,
"learning_rate": 5.936428668985265e-05,
"loss": 1.9524,
"step": 55
},
{
"epoch": 0.2318840579710145,
"grad_norm": 0.049168910831213,
"learning_rate": 5.932136261841511e-05,
"loss": 1.741,
"step": 56
},
{
"epoch": 0.2360248447204969,
"grad_norm": 0.057303208857774734,
"learning_rate": 5.9277052908981214e-05,
"loss": 1.9622,
"step": 57
},
{
"epoch": 0.2401656314699793,
"grad_norm": 0.04970083758234978,
"learning_rate": 5.923135965549244e-05,
"loss": 1.7606,
"step": 58
},
{
"epoch": 0.2443064182194617,
"grad_norm": 0.048351775854825974,
"learning_rate": 5.9184285017272304e-05,
"loss": 2.0664,
"step": 59
},
{
"epoch": 0.2484472049689441,
"grad_norm": 0.04087584838271141,
"learning_rate": 5.9135831218924354e-05,
"loss": 2.0019,
"step": 60
},
{
"epoch": 0.2525879917184265,
"grad_norm": 0.05114104971289635,
"learning_rate": 5.908600055022705e-05,
"loss": 1.9351,
"step": 61
},
{
"epoch": 0.2567287784679089,
"grad_norm": 0.04958435893058777,
"learning_rate": 5.9034795366025494e-05,
"loss": 1.8421,
"step": 62
},
{
"epoch": 0.2608695652173913,
"grad_norm": 0.052330292761325836,
"learning_rate": 5.898221808612025e-05,
"loss": 1.7381,
"step": 63
},
{
"epoch": 0.2650103519668737,
"grad_norm": 0.04455335810780525,
"learning_rate": 5.8928271195152864e-05,
"loss": 1.8632,
"step": 64
},
{
"epoch": 0.2691511387163561,
"grad_norm": 0.050691474229097366,
"learning_rate": 5.8872957242488585e-05,
"loss": 1.998,
"step": 65
},
{
"epoch": 0.2732919254658385,
"grad_norm": 0.04122067987918854,
"learning_rate": 5.881627884209576e-05,
"loss": 1.8917,
"step": 66
},
{
"epoch": 0.2774327122153209,
"grad_norm": 0.04583807662129402,
"learning_rate": 5.875823867242242e-05,
"loss": 1.927,
"step": 67
},
{
"epoch": 0.2815734989648033,
"grad_norm": 0.06381084024906158,
"learning_rate": 5.8698839476269585e-05,
"loss": 1.6846,
"step": 68
},
{
"epoch": 0.2857142857142857,
"grad_norm": 0.049351632595062256,
"learning_rate": 5.863808406066178e-05,
"loss": 1.9392,
"step": 69
},
{
"epoch": 0.2898550724637681,
"grad_norm": 0.06118392199277878,
"learning_rate": 5.8575975296714274e-05,
"loss": 1.7415,
"step": 70
},
{
"epoch": 0.2939958592132505,
"grad_norm": 0.07954799383878708,
"learning_rate": 5.851251611949747e-05,
"loss": 1.8413,
"step": 71
},
{
"epoch": 0.2981366459627329,
"grad_norm": 0.045418642461299896,
"learning_rate": 5.8447709527898164e-05,
"loss": 1.6506,
"step": 72
},
{
"epoch": 0.3022774327122153,
"grad_norm": 0.04858332872390747,
"learning_rate": 5.838155858447782e-05,
"loss": 2.0256,
"step": 73
},
{
"epoch": 0.3064182194616977,
"grad_norm": 0.046768918633461,
"learning_rate": 5.831406641532789e-05,
"loss": 1.9066,
"step": 74
},
{
"epoch": 0.3105590062111801,
"grad_norm": 0.05008407309651375,
"learning_rate": 5.824523620992205e-05,
"loss": 1.7993,
"step": 75
},
{
"epoch": 0.3146997929606625,
"grad_norm": 0.04596088081598282,
"learning_rate": 5.8175071220965457e-05,
"loss": 1.8114,
"step": 76
},
{
"epoch": 0.3188405797101449,
"grad_norm": 0.048186447471380234,
"learning_rate": 5.810357476424109e-05,
"loss": 1.9843,
"step": 77
},
{
"epoch": 0.32298136645962733,
"grad_norm": 0.05133625492453575,
"learning_rate": 5.8030750218453006e-05,
"loss": 1.7612,
"step": 78
},
{
"epoch": 0.32712215320910976,
"grad_norm": 0.04953281581401825,
"learning_rate": 5.795660102506671e-05,
"loss": 1.7625,
"step": 79
},
{
"epoch": 0.33126293995859213,
"grad_norm": 0.04570222645998001,
"learning_rate": 5.788113068814648e-05,
"loss": 1.795,
"step": 80
},
{
"epoch": 0.33540372670807456,
"grad_norm": 0.11745952069759369,
"learning_rate": 5.7804342774189835e-05,
"loss": 1.7432,
"step": 81
},
{
"epoch": 0.33954451345755693,
"grad_norm": 0.05133218690752983,
"learning_rate": 5.7726240911958916e-05,
"loss": 1.8884,
"step": 82
},
{
"epoch": 0.34368530020703936,
"grad_norm": 0.050902482122182846,
"learning_rate": 5.7646828792309084e-05,
"loss": 1.889,
"step": 83
},
{
"epoch": 0.34782608695652173,
"grad_norm": 0.05631539598107338,
"learning_rate": 5.756611016801442e-05,
"loss": 1.845,
"step": 84
},
{
"epoch": 0.35196687370600416,
"grad_norm": 0.05162457004189491,
"learning_rate": 5.7484088853590474e-05,
"loss": 1.7412,
"step": 85
},
{
"epoch": 0.35610766045548653,
"grad_norm": 0.06242730841040611,
"learning_rate": 5.740076872511391e-05,
"loss": 1.941,
"step": 86
},
{
"epoch": 0.36024844720496896,
"grad_norm": 0.05125703290104866,
"learning_rate": 5.731615372003939e-05,
"loss": 1.7513,
"step": 87
},
{
"epoch": 0.36438923395445133,
"grad_norm": 0.056337494403123856,
"learning_rate": 5.7230247837013484e-05,
"loss": 1.8812,
"step": 88
},
{
"epoch": 0.36853002070393376,
"grad_norm": 0.056918010115623474,
"learning_rate": 5.714305513568571e-05,
"loss": 1.6549,
"step": 89
},
{
"epoch": 0.37267080745341613,
"grad_norm": 0.04984583333134651,
"learning_rate": 5.705457973651668e-05,
"loss": 1.9668,
"step": 90
},
{
"epoch": 0.37681159420289856,
"grad_norm": 0.0550064854323864,
"learning_rate": 5.69648258205834e-05,
"loss": 1.8977,
"step": 91
},
{
"epoch": 0.38095238095238093,
"grad_norm": 0.0585719496011734,
"learning_rate": 5.6873797629381675e-05,
"loss": 1.6596,
"step": 92
},
{
"epoch": 0.38509316770186336,
"grad_norm": 0.0636669397354126,
"learning_rate": 5.678149946462564e-05,
"loss": 1.9142,
"step": 93
},
{
"epoch": 0.38923395445134573,
"grad_norm": 0.05703834444284439,
"learning_rate": 5.6687935688044516e-05,
"loss": 1.7277,
"step": 94
},
{
"epoch": 0.39337474120082816,
"grad_norm": 0.05158121883869171,
"learning_rate": 5.6593110721176475e-05,
"loss": 1.9425,
"step": 95
},
{
"epoch": 0.39751552795031053,
"grad_norm": 0.053467340767383575,
"learning_rate": 5.649702904515969e-05,
"loss": 1.9008,
"step": 96
},
{
"epoch": 0.40165631469979296,
"grad_norm": 0.060619086027145386,
"learning_rate": 5.6399695200520537e-05,
"loss": 1.7648,
"step": 97
},
{
"epoch": 0.4057971014492754,
"grad_norm": 0.05301009491086006,
"learning_rate": 5.63011137869591e-05,
"loss": 1.8785,
"step": 98
},
{
"epoch": 0.40993788819875776,
"grad_norm": 0.05011270195245743,
"learning_rate": 5.620128946313172e-05,
"loss": 1.9473,
"step": 99
},
{
"epoch": 0.4140786749482402,
"grad_norm": 0.04839683696627617,
"learning_rate": 5.610022694643091e-05,
"loss": 1.8244,
"step": 100
},
{
"epoch": 0.41821946169772256,
"grad_norm": 0.058406904339790344,
"learning_rate": 5.5997931012762374e-05,
"loss": 1.7561,
"step": 101
},
{
"epoch": 0.422360248447205,
"grad_norm": 0.04675458371639252,
"learning_rate": 5.589440649631933e-05,
"loss": 1.964,
"step": 102
},
{
"epoch": 0.42650103519668736,
"grad_norm": 0.05772264674305916,
"learning_rate": 5.578965828935409e-05,
"loss": 1.8115,
"step": 103
},
{
"epoch": 0.4306418219461698,
"grad_norm": 0.05551251769065857,
"learning_rate": 5.568369134194681e-05,
"loss": 1.6467,
"step": 104
},
{
"epoch": 0.43478260869565216,
"grad_norm": 0.05348852276802063,
"learning_rate": 5.557651066177161e-05,
"loss": 1.7895,
"step": 105
},
{
"epoch": 0.4389233954451346,
"grad_norm": 0.051892660558223724,
"learning_rate": 5.546812131385991e-05,
"loss": 1.7947,
"step": 106
},
{
"epoch": 0.44306418219461696,
"grad_norm": 0.058433856815099716,
"learning_rate": 5.535852842036107e-05,
"loss": 1.8444,
"step": 107
},
{
"epoch": 0.4472049689440994,
"grad_norm": 0.059038013219833374,
"learning_rate": 5.5247737160300346e-05,
"loss": 1.8436,
"step": 108
},
{
"epoch": 0.45134575569358176,
"grad_norm": 0.052642837166786194,
"learning_rate": 5.513575276933413e-05,
"loss": 1.9976,
"step": 109
},
{
"epoch": 0.4554865424430642,
"grad_norm": 0.05203791335225105,
"learning_rate": 5.502258053950253e-05,
"loss": 1.8925,
"step": 110
},
{
"epoch": 0.45962732919254656,
"grad_norm": 0.05623999238014221,
"learning_rate": 5.490822581897929e-05,
"loss": 1.81,
"step": 111
},
{
"epoch": 0.463768115942029,
"grad_norm": 0.06250981241464615,
"learning_rate": 5.479269401181907e-05,
"loss": 1.6598,
"step": 112
},
{
"epoch": 0.46790890269151136,
"grad_norm": 0.06358765810728073,
"learning_rate": 5.4675990577702005e-05,
"loss": 1.6401,
"step": 113
},
{
"epoch": 0.4720496894409938,
"grad_norm": 0.058844760060310364,
"learning_rate": 5.455812103167579e-05,
"loss": 1.7858,
"step": 114
},
{
"epoch": 0.47619047619047616,
"grad_norm": 0.06053264066576958,
"learning_rate": 5.443909094389497e-05,
"loss": 1.7032,
"step": 115
},
{
"epoch": 0.4803312629399586,
"grad_norm": 0.05357799679040909,
"learning_rate": 5.4318905939357756e-05,
"loss": 1.6963,
"step": 116
},
{
"epoch": 0.484472049689441,
"grad_norm": 0.05877089872956276,
"learning_rate": 5.4197571697640206e-05,
"loss": 1.721,
"step": 117
},
{
"epoch": 0.4886128364389234,
"grad_norm": 0.05849786847829819,
"learning_rate": 5.40750939526278e-05,
"loss": 1.9869,
"step": 118
},
{
"epoch": 0.4927536231884058,
"grad_norm": 0.06864643096923828,
"learning_rate": 5.395147849224451e-05,
"loss": 1.7978,
"step": 119
},
{
"epoch": 0.4968944099378882,
"grad_norm": 0.06335002183914185,
"learning_rate": 5.382673115817923e-05,
"loss": 2.163,
"step": 120
},
{
"epoch": 0.5010351966873706,
"grad_norm": 0.06154783070087433,
"learning_rate": 5.3700857845609764e-05,
"loss": 2.0194,
"step": 121
},
{
"epoch": 0.505175983436853,
"grad_norm": 0.06728032231330872,
"learning_rate": 5.357386450292422e-05,
"loss": 1.7549,
"step": 122
},
{
"epoch": 0.5093167701863354,
"grad_norm": 0.05433879792690277,
"learning_rate": 5.3445757131439915e-05,
"loss": 1.9202,
"step": 123
},
{
"epoch": 0.5134575569358178,
"grad_norm": 0.05439319089055061,
"learning_rate": 5.3316541785119734e-05,
"loss": 1.826,
"step": 124
},
{
"epoch": 0.5175983436853002,
"grad_norm": 0.0665251687169075,
"learning_rate": 5.318622457028609e-05,
"loss": 1.435,
"step": 125
},
{
"epoch": 0.5217391304347826,
"grad_norm": 0.05804029107093811,
"learning_rate": 5.305481164533233e-05,
"loss": 1.827,
"step": 126
},
{
"epoch": 0.525879917184265,
"grad_norm": 0.06869634985923767,
"learning_rate": 5.292230922043171e-05,
"loss": 1.8714,
"step": 127
},
{
"epoch": 0.5300207039337475,
"grad_norm": 0.056967902928590775,
"learning_rate": 5.2788723557243936e-05,
"loss": 1.7836,
"step": 128
},
{
"epoch": 0.5341614906832298,
"grad_norm": 0.06087717413902283,
"learning_rate": 5.265406096861923e-05,
"loss": 1.661,
"step": 129
},
{
"epoch": 0.5383022774327122,
"grad_norm": 0.0705471932888031,
"learning_rate": 5.251832781830002e-05,
"loss": 1.8054,
"step": 130
},
{
"epoch": 0.5424430641821946,
"grad_norm": 0.06314948201179504,
"learning_rate": 5.238153052062022e-05,
"loss": 1.786,
"step": 131
},
{
"epoch": 0.546583850931677,
"grad_norm": 0.06368881464004517,
"learning_rate": 5.224367554020208e-05,
"loss": 2.0254,
"step": 132
},
{
"epoch": 0.5507246376811594,
"grad_norm": 0.06457039713859558,
"learning_rate": 5.2104769391650726e-05,
"loss": 1.8116,
"step": 133
},
{
"epoch": 0.5548654244306418,
"grad_norm": 0.056793998926877975,
"learning_rate": 5.196481863924626e-05,
"loss": 1.8454,
"step": 134
},
{
"epoch": 0.5590062111801242,
"grad_norm": 0.058062594383955,
"learning_rate": 5.182382989663357e-05,
"loss": 1.8074,
"step": 135
},
{
"epoch": 0.5631469979296067,
"grad_norm": 0.05942286178469658,
"learning_rate": 5.16818098265098e-05,
"loss": 1.9194,
"step": 136
},
{
"epoch": 0.567287784679089,
"grad_norm": 0.0624711774289608,
"learning_rate": 5.153876514030948e-05,
"loss": 1.8713,
"step": 137
},
{
"epoch": 0.5714285714285714,
"grad_norm": 0.06994223594665527,
"learning_rate": 5.139470259788736e-05,
"loss": 1.8384,
"step": 138
},
{
"epoch": 0.5755693581780539,
"grad_norm": 0.06756718456745148,
"learning_rate": 5.124962900719895e-05,
"loss": 1.7687,
"step": 139
},
{
"epoch": 0.5797101449275363,
"grad_norm": 0.08007334172725677,
"learning_rate": 5.110355122397885e-05,
"loss": 1.7295,
"step": 140
},
{
"epoch": 0.5838509316770186,
"grad_norm": 0.06464424729347229,
"learning_rate": 5.095647615141668e-05,
"loss": 1.9968,
"step": 141
},
{
"epoch": 0.587991718426501,
"grad_norm": 0.062113020569086075,
"learning_rate": 5.080841073983095e-05,
"loss": 1.7497,
"step": 142
},
{
"epoch": 0.5921325051759835,
"grad_norm": 0.06131444498896599,
"learning_rate": 5.0659361986340544e-05,
"loss": 1.7768,
"step": 143
},
{
"epoch": 0.5962732919254659,
"grad_norm": 0.06630904972553253,
"learning_rate": 5.0509336934534086e-05,
"loss": 1.9866,
"step": 144
},
{
"epoch": 0.6004140786749482,
"grad_norm": 0.06003854051232338,
"learning_rate": 5.035834267413708e-05,
"loss": 1.8859,
"step": 145
},
{
"epoch": 0.6045548654244306,
"grad_norm": 0.07283063977956772,
"learning_rate": 5.020638634067685e-05,
"loss": 1.8534,
"step": 146
},
{
"epoch": 0.6086956521739131,
"grad_norm": 0.06754714250564575,
"learning_rate": 5.005347511514537e-05,
"loss": 1.5808,
"step": 147
},
{
"epoch": 0.6128364389233955,
"grad_norm": 0.060414694249629974,
"learning_rate": 4.9899616223659886e-05,
"loss": 1.8807,
"step": 148
},
{
"epoch": 0.6169772256728778,
"grad_norm": 0.05546625331044197,
"learning_rate": 4.974481693712145e-05,
"loss": 1.937,
"step": 149
},
{
"epoch": 0.6211180124223602,
"grad_norm": 0.07293254137039185,
"learning_rate": 4.95890845708713e-05,
"loss": 1.8542,
"step": 150
},
{
"epoch": 0.6252587991718427,
"grad_norm": 0.07628627121448517,
"learning_rate": 4.943242648434515e-05,
"loss": 1.5924,
"step": 151
},
{
"epoch": 0.629399585921325,
"grad_norm": 0.09479758888483047,
"learning_rate": 4.927485008072549e-05,
"loss": 1.7652,
"step": 152
},
{
"epoch": 0.6335403726708074,
"grad_norm": 0.061082031577825546,
"learning_rate": 4.911636280659161e-05,
"loss": 1.7477,
"step": 153
},
{
"epoch": 0.6376811594202898,
"grad_norm": 0.06984806805849075,
"learning_rate": 4.8956972151567766e-05,
"loss": 1.9104,
"step": 154
},
{
"epoch": 0.6418219461697723,
"grad_norm": 0.09679781645536423,
"learning_rate": 4.8796685647969267e-05,
"loss": 1.7935,
"step": 155
},
{
"epoch": 0.6459627329192547,
"grad_norm": 0.06585158407688141,
"learning_rate": 4.8635510870446435e-05,
"loss": 1.6618,
"step": 156
},
{
"epoch": 0.650103519668737,
"grad_norm": 0.07509002834558487,
"learning_rate": 4.8473455435626754e-05,
"loss": 1.7686,
"step": 157
},
{
"epoch": 0.6542443064182195,
"grad_norm": 0.07331015914678574,
"learning_rate": 4.8310527001754874e-05,
"loss": 1.7972,
"step": 158
},
{
"epoch": 0.6583850931677019,
"grad_norm": 0.07571995258331299,
"learning_rate": 4.814673326833067e-05,
"loss": 1.798,
"step": 159
},
{
"epoch": 0.6625258799171843,
"grad_norm": 0.05544979125261307,
"learning_rate": 4.7982081975745476e-05,
"loss": 1.8603,
"step": 160
},
{
"epoch": 0.6666666666666666,
"grad_norm": 0.06246506795287132,
"learning_rate": 4.781658090491623e-05,
"loss": 2.0877,
"step": 161
},
{
"epoch": 0.6708074534161491,
"grad_norm": 0.057248059660196304,
"learning_rate": 4.765023787691779e-05,
"loss": 1.857,
"step": 162
},
{
"epoch": 0.6749482401656315,
"grad_norm": 0.0628746747970581,
"learning_rate": 4.748306075261335e-05,
"loss": 1.805,
"step": 163
},
{
"epoch": 0.6790890269151139,
"grad_norm": 0.06979019939899445,
"learning_rate": 4.731505743228294e-05,
"loss": 1.7996,
"step": 164
},
{
"epoch": 0.6832298136645962,
"grad_norm": 0.058253347873687744,
"learning_rate": 4.714623585525009e-05,
"loss": 1.9695,
"step": 165
},
{
"epoch": 0.6873706004140787,
"grad_norm": 0.05848124623298645,
"learning_rate": 4.697660399950663e-05,
"loss": 1.7858,
"step": 166
},
{
"epoch": 0.6915113871635611,
"grad_norm": 0.0602106973528862,
"learning_rate": 4.680616988133572e-05,
"loss": 1.9007,
"step": 167
},
{
"epoch": 0.6956521739130435,
"grad_norm": 0.06956855952739716,
"learning_rate": 4.663494155493296e-05,
"loss": 1.8796,
"step": 168
},
{
"epoch": 0.6997929606625258,
"grad_norm": 0.055345602333545685,
"learning_rate": 4.64629271120258e-05,
"loss": 1.7476,
"step": 169
},
{
"epoch": 0.7039337474120083,
"grad_norm": 0.06562691926956177,
"learning_rate": 4.6290134681491165e-05,
"loss": 1.7378,
"step": 170
},
{
"epoch": 0.7080745341614907,
"grad_norm": 0.07123929262161255,
"learning_rate": 4.611657242897131e-05,
"loss": 1.7195,
"step": 171
},
{
"epoch": 0.7122153209109731,
"grad_norm": 0.07925646752119064,
"learning_rate": 4.5942248556487914e-05,
"loss": 1.8299,
"step": 172
},
{
"epoch": 0.7163561076604554,
"grad_norm": 0.088850237429142,
"learning_rate": 4.576717130205448e-05,
"loss": 1.8684,
"step": 173
},
{
"epoch": 0.7204968944099379,
"grad_norm": 0.06837620586156845,
"learning_rate": 4.559134893928705e-05,
"loss": 1.7766,
"step": 174
},
{
"epoch": 0.7246376811594203,
"grad_norm": 0.06634443253278732,
"learning_rate": 4.541478977701321e-05,
"loss": 1.8053,
"step": 175
},
{
"epoch": 0.7287784679089027,
"grad_norm": 0.06066809594631195,
"learning_rate": 4.523750215887942e-05,
"loss": 2.0255,
"step": 176
},
{
"epoch": 0.7329192546583851,
"grad_norm": 0.07539010792970657,
"learning_rate": 4.505949446295677e-05,
"loss": 1.902,
"step": 177
},
{
"epoch": 0.7370600414078675,
"grad_norm": 0.08485755324363708,
"learning_rate": 4.488077510134499e-05,
"loss": 1.752,
"step": 178
},
{
"epoch": 0.7412008281573499,
"grad_norm": 0.06715612858533859,
"learning_rate": 4.4701352519774986e-05,
"loss": 1.8651,
"step": 179
},
{
"epoch": 0.7453416149068323,
"grad_norm": 0.05764465034008026,
"learning_rate": 4.4521235197209686e-05,
"loss": 1.9279,
"step": 180
},
{
"epoch": 0.7494824016563147,
"grad_norm": 0.0972018614411354,
"learning_rate": 4.434043164544333e-05,
"loss": 1.7838,
"step": 181
},
{
"epoch": 0.7536231884057971,
"grad_norm": 0.06594452261924744,
"learning_rate": 4.415895040869929e-05,
"loss": 1.8846,
"step": 182
},
{
"epoch": 0.7577639751552795,
"grad_norm": 0.0821613073348999,
"learning_rate": 4.397680006322622e-05,
"loss": 1.5778,
"step": 183
},
{
"epoch": 0.7619047619047619,
"grad_norm": 0.07455030083656311,
"learning_rate": 4.379398921689284e-05,
"loss": 1.9354,
"step": 184
},
{
"epoch": 0.7660455486542443,
"grad_norm": 0.07369917631149292,
"learning_rate": 4.361052650878111e-05,
"loss": 1.6528,
"step": 185
},
{
"epoch": 0.7701863354037267,
"grad_norm": 0.06145176291465759,
"learning_rate": 4.342642060877798e-05,
"loss": 1.8605,
"step": 186
},
{
"epoch": 0.7743271221532091,
"grad_norm": 0.0714261382818222,
"learning_rate": 4.324168021716568e-05,
"loss": 1.9954,
"step": 187
},
{
"epoch": 0.7784679089026915,
"grad_norm": 0.07373001426458359,
"learning_rate": 4.30563140642106e-05,
"loss": 1.6576,
"step": 188
},
{
"epoch": 0.782608695652174,
"grad_norm": 0.08491583913564682,
"learning_rate": 4.287033090975068e-05,
"loss": 1.8222,
"step": 189
},
{
"epoch": 0.7867494824016563,
"grad_norm": 0.08685383945703506,
"learning_rate": 4.268373954278145e-05,
"loss": 1.7194,
"step": 190
},
{
"epoch": 0.7908902691511387,
"grad_norm": 0.0834331288933754,
"learning_rate": 4.249654878104073e-05,
"loss": 1.7173,
"step": 191
},
{
"epoch": 0.7950310559006211,
"grad_norm": 0.09984423220157623,
"learning_rate": 4.230876747059192e-05,
"loss": 1.7618,
"step": 192
},
{
"epoch": 0.7991718426501035,
"grad_norm": 0.0643148198723793,
"learning_rate": 4.2120404485405886e-05,
"loss": 1.8246,
"step": 193
},
{
"epoch": 0.8033126293995859,
"grad_norm": 0.06520809233188629,
"learning_rate": 4.193146872694173e-05,
"loss": 1.827,
"step": 194
},
{
"epoch": 0.8074534161490683,
"grad_norm": 0.07631880044937134,
"learning_rate": 4.174196912372605e-05,
"loss": 1.9521,
"step": 195
},
{
"epoch": 0.8115942028985508,
"grad_norm": 0.08804851770401001,
"learning_rate": 4.1551914630930986e-05,
"loss": 1.8057,
"step": 196
},
{
"epoch": 0.8157349896480331,
"grad_norm": 0.10719820857048035,
"learning_rate": 4.1361314229951114e-05,
"loss": 1.7777,
"step": 197
},
{
"epoch": 0.8198757763975155,
"grad_norm": 0.09046828001737595,
"learning_rate": 4.117017692797894e-05,
"loss": 1.8581,
"step": 198
},
{
"epoch": 0.8240165631469979,
"grad_norm": 0.06453605741262436,
"learning_rate": 4.0978511757579266e-05,
"loss": 1.7737,
"step": 199
},
{
"epoch": 0.8281573498964804,
"grad_norm": 0.06556473672389984,
"learning_rate": 4.0786327776262355e-05,
"loss": 1.8843,
"step": 200
},
{
"epoch": 0.8322981366459627,
"grad_norm": 0.07110321521759033,
"learning_rate": 4.059363406605589e-05,
"loss": 1.8864,
"step": 201
},
{
"epoch": 0.8364389233954451,
"grad_norm": 0.06083063408732414,
"learning_rate": 4.0400439733075764e-05,
"loss": 1.6101,
"step": 202
},
{
"epoch": 0.8405797101449275,
"grad_norm": 0.0694584771990776,
"learning_rate": 4.020675390709579e-05,
"loss": 1.7452,
"step": 203
},
{
"epoch": 0.84472049689441,
"grad_norm": 0.08408311009407043,
"learning_rate": 4.001258574111624e-05,
"loss": 1.7393,
"step": 204
},
{
"epoch": 0.8488612836438924,
"grad_norm": 0.06838119029998779,
"learning_rate": 3.9817944410931276e-05,
"loss": 1.9128,
"step": 205
},
{
"epoch": 0.8530020703933747,
"grad_norm": 0.06725747138261795,
"learning_rate": 3.962283911469539e-05,
"loss": 1.8047,
"step": 206
},
{
"epoch": 0.8571428571428571,
"grad_norm": 0.07443306595087051,
"learning_rate": 3.942727907248867e-05,
"loss": 1.8384,
"step": 207
},
{
"epoch": 0.8612836438923396,
"grad_norm": 0.05935392156243324,
"learning_rate": 3.923127352588112e-05,
"loss": 1.9675,
"step": 208
},
{
"epoch": 0.865424430641822,
"grad_norm": 0.07299666106700897,
"learning_rate": 3.903483173749589e-05,
"loss": 2.092,
"step": 209
},
{
"epoch": 0.8695652173913043,
"grad_norm": 0.062474749982357025,
"learning_rate": 3.883796299057164e-05,
"loss": 1.8276,
"step": 210
},
{
"epoch": 0.8737060041407867,
"grad_norm": 0.059401869773864746,
"learning_rate": 3.8640676588523735e-05,
"loss": 1.7309,
"step": 211
},
{
"epoch": 0.8778467908902692,
"grad_norm": 0.1073634922504425,
"learning_rate": 3.844298185450466e-05,
"loss": 1.6717,
"step": 212
},
{
"epoch": 0.8819875776397516,
"grad_norm": 0.06048239767551422,
"learning_rate": 3.824488813096342e-05,
"loss": 1.854,
"step": 213
},
{
"epoch": 0.8861283643892339,
"grad_norm": 0.06702765077352524,
"learning_rate": 3.804640477920406e-05,
"loss": 1.8754,
"step": 214
},
{
"epoch": 0.8902691511387164,
"grad_norm": 0.07091647386550903,
"learning_rate": 3.784754117894326e-05,
"loss": 1.8767,
"step": 215
},
{
"epoch": 0.8944099378881988,
"grad_norm": 0.06531751155853271,
"learning_rate": 3.764830672786705e-05,
"loss": 1.776,
"step": 216
},
{
"epoch": 0.8985507246376812,
"grad_norm": 0.09837061166763306,
"learning_rate": 3.744871084118679e-05,
"loss": 1.7286,
"step": 217
},
{
"epoch": 0.9026915113871635,
"grad_norm": 0.08499777317047119,
"learning_rate": 3.7248762951194145e-05,
"loss": 1.7482,
"step": 218
},
{
"epoch": 0.906832298136646,
"grad_norm": 0.06564295291900635,
"learning_rate": 3.704847250681538e-05,
"loss": 1.7492,
"step": 219
},
{
"epoch": 0.9109730848861284,
"grad_norm": 0.0832459032535553,
"learning_rate": 3.684784897316485e-05,
"loss": 1.8582,
"step": 220
},
{
"epoch": 0.9151138716356108,
"grad_norm": 0.061843667179346085,
"learning_rate": 3.6646901831097695e-05,
"loss": 1.8591,
"step": 221
},
{
"epoch": 0.9192546583850931,
"grad_norm": 0.09650903940200806,
"learning_rate": 3.6445640576761774e-05,
"loss": 1.676,
"step": 222
},
{
"epoch": 0.9233954451345756,
"grad_norm": 0.13103087246418,
"learning_rate": 3.6244074721148947e-05,
"loss": 1.6361,
"step": 223
},
{
"epoch": 0.927536231884058,
"grad_norm": 0.07348862290382385,
"learning_rate": 3.6042213789645626e-05,
"loss": 1.7993,
"step": 224
},
{
"epoch": 0.9316770186335404,
"grad_norm": 0.06679573655128479,
"learning_rate": 3.584006732158255e-05,
"loss": 1.9934,
"step": 225
},
{
"epoch": 0.9358178053830227,
"grad_norm": 0.06558524817228317,
"learning_rate": 3.5637644869784075e-05,
"loss": 1.8979,
"step": 226
},
{
"epoch": 0.9399585921325052,
"grad_norm": 0.0905718207359314,
"learning_rate": 3.543495600011674e-05,
"loss": 1.9162,
"step": 227
},
{
"epoch": 0.9440993788819876,
"grad_norm": 0.08884776383638382,
"learning_rate": 3.523201029103711e-05,
"loss": 1.884,
"step": 228
},
{
"epoch": 0.94824016563147,
"grad_norm": 0.10564474761486053,
"learning_rate": 3.502881733313924e-05,
"loss": 1.7368,
"step": 229
},
{
"epoch": 0.9523809523809523,
"grad_norm": 0.06858230382204056,
"learning_rate": 3.482538672870141e-05,
"loss": 2.0018,
"step": 230
},
{
"epoch": 0.9565217391304348,
"grad_norm": 0.06798145174980164,
"learning_rate": 3.462172809123231e-05,
"loss": 1.9542,
"step": 231
},
{
"epoch": 0.9606625258799172,
"grad_norm": 0.0749635100364685,
"learning_rate": 3.441785104501683e-05,
"loss": 1.8139,
"step": 232
},
{
"epoch": 0.9648033126293996,
"grad_norm": 0.06580834090709686,
"learning_rate": 3.421376522466114e-05,
"loss": 1.8621,
"step": 233
},
{
"epoch": 0.968944099378882,
"grad_norm": 0.10760274529457092,
"learning_rate": 3.400948027463747e-05,
"loss": 1.7851,
"step": 234
},
{
"epoch": 0.9730848861283644,
"grad_norm": 0.09195095300674438,
"learning_rate": 3.38050058488283e-05,
"loss": 1.8275,
"step": 235
},
{
"epoch": 0.9772256728778468,
"grad_norm": 0.09066396951675415,
"learning_rate": 3.3600351610070144e-05,
"loss": 1.7884,
"step": 236
},
{
"epoch": 0.9813664596273292,
"grad_norm": 0.09388420730829239,
"learning_rate": 3.339552722969695e-05,
"loss": 1.8267,
"step": 237
},
{
"epoch": 0.9855072463768116,
"grad_norm": 0.07875888049602509,
"learning_rate": 3.319054238708301e-05,
"loss": 1.8035,
"step": 238
},
{
"epoch": 0.989648033126294,
"grad_norm": 0.09944237768650055,
"learning_rate": 3.298540676918562e-05,
"loss": 1.5542,
"step": 239
},
{
"epoch": 0.9937888198757764,
"grad_norm": 0.11820376664400101,
"learning_rate": 3.2780130070087196e-05,
"loss": 1.7907,
"step": 240
},
{
"epoch": 0.9979296066252588,
"grad_norm": 0.0819728672504425,
"learning_rate": 3.2574721990537245e-05,
"loss": 1.8663,
"step": 241
},
{
"epoch": 1.0,
"grad_norm": 0.08972211927175522,
"learning_rate": 3.236919223749394e-05,
"loss": 1.9601,
"step": 242
},
{
"epoch": 1.0041407867494825,
"grad_norm": 0.0647214949131012,
"learning_rate": 3.216355052366532e-05,
"loss": 1.8954,
"step": 243
},
{
"epoch": 1.0082815734989647,
"grad_norm": 0.08813779801130295,
"learning_rate": 3.195780656705039e-05,
"loss": 1.6085,
"step": 244
},
{
"epoch": 1.0124223602484472,
"grad_norm": 0.1159641295671463,
"learning_rate": 3.175197009047982e-05,
"loss": 1.6147,
"step": 245
},
{
"epoch": 1.0165631469979297,
"grad_norm": 0.12031491100788116,
"learning_rate": 3.1546050821156486e-05,
"loss": 1.6832,
"step": 246
},
{
"epoch": 1.020703933747412,
"grad_norm": 0.14147016406059265,
"learning_rate": 3.1340058490195815e-05,
"loss": 1.6109,
"step": 247
},
{
"epoch": 1.0248447204968945,
"grad_norm": 0.09276167303323746,
"learning_rate": 3.113400283216587e-05,
"loss": 1.7716,
"step": 248
},
{
"epoch": 1.0289855072463767,
"grad_norm": 0.21230283379554749,
"learning_rate": 3.0927893584627405e-05,
"loss": 1.6087,
"step": 249
},
{
"epoch": 1.0331262939958592,
"grad_norm": 0.115353524684906,
"learning_rate": 3.0721740487673606e-05,
"loss": 1.6034,
"step": 250
}
],
"logging_steps": 1,
"max_steps": 482,
"num_input_tokens_seen": 0,
"num_train_epochs": 2,
"save_steps": 25,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 6.302325934758298e+18,
"train_batch_size": 8,
"trial_name": null,
"trial_params": null
}