diff --git "a/trainer_state.json" "b/trainer_state.json" new file mode 100644--- /dev/null +++ "b/trainer_state.json" @@ -0,0 +1,169681 @@ +{ + "best_global_step": null, + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 1.0, + "eval_steps": 500, + "global_step": 24234, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 4.126433935792688e-05, + "grad_norm": 8.242608210589822, + "learning_rate": 4.120879120879121e-09, + "loss": 1.0685, + "step": 1 + }, + { + "epoch": 8.252867871585376e-05, + "grad_norm": 7.43083584301719, + "learning_rate": 8.241758241758242e-09, + "loss": 1.1365, + "step": 2 + }, + { + "epoch": 0.00012379301807378064, + "grad_norm": 13.27830641949066, + "learning_rate": 1.2362637362637363e-08, + "loss": 1.0835, + "step": 3 + }, + { + "epoch": 0.00016505735743170752, + "grad_norm": 9.827891885892281, + "learning_rate": 1.6483516483516484e-08, + "loss": 1.1121, + "step": 4 + }, + { + "epoch": 0.0002063216967896344, + "grad_norm": 8.83877441923349, + "learning_rate": 2.0604395604395605e-08, + "loss": 1.1013, + "step": 5 + }, + { + "epoch": 0.0002475860361475613, + "grad_norm": 13.982503432531052, + "learning_rate": 2.4725274725274727e-08, + "loss": 1.1441, + "step": 6 + }, + { + "epoch": 0.00028885037550548814, + "grad_norm": 8.758488341240888, + "learning_rate": 2.8846153846153848e-08, + "loss": 1.0729, + "step": 7 + }, + { + "epoch": 0.00033011471486341505, + "grad_norm": 17.417314843206345, + "learning_rate": 3.296703296703297e-08, + "loss": 1.0659, + "step": 8 + }, + { + "epoch": 0.0003713790542213419, + "grad_norm": 20.213554567241378, + "learning_rate": 3.7087912087912087e-08, + "loss": 1.0806, + "step": 9 + }, + { + "epoch": 0.0004126433935792688, + "grad_norm": 11.609105866655131, + "learning_rate": 4.120879120879121e-08, + "loss": 1.106, + "step": 10 + }, + { + "epoch": 0.00045390773293719567, + "grad_norm": 9.957325762565445, + "learning_rate": 4.532967032967033e-08, + "loss": 1.0542, + "step": 11 + }, + { + "epoch": 0.0004951720722951226, + "grad_norm": 8.313234335587612, + "learning_rate": 4.945054945054945e-08, + "loss": 1.099, + "step": 12 + }, + { + "epoch": 0.0005364364116530495, + "grad_norm": 12.07220373248911, + "learning_rate": 5.357142857142857e-08, + "loss": 1.0608, + "step": 13 + }, + { + "epoch": 0.0005777007510109763, + "grad_norm": 11.152420279003984, + "learning_rate": 5.7692307692307695e-08, + "loss": 1.0711, + "step": 14 + }, + { + "epoch": 0.0006189650903689032, + "grad_norm": 8.086651179201171, + "learning_rate": 6.181318681318682e-08, + "loss": 1.1225, + "step": 15 + }, + { + "epoch": 0.0006602294297268301, + "grad_norm": 8.839541455857415, + "learning_rate": 6.593406593406594e-08, + "loss": 1.0349, + "step": 16 + }, + { + "epoch": 0.0007014937690847569, + "grad_norm": 10.971886941674885, + "learning_rate": 7.005494505494506e-08, + "loss": 1.1209, + "step": 17 + }, + { + "epoch": 0.0007427581084426838, + "grad_norm": 7.171037538359772, + "learning_rate": 7.417582417582417e-08, + "loss": 1.0674, + "step": 18 + }, + { + "epoch": 0.0007840224478006107, + "grad_norm": 8.143179089308568, + "learning_rate": 7.82967032967033e-08, + "loss": 1.0636, + "step": 19 + }, + { + "epoch": 0.0008252867871585376, + "grad_norm": 21.279696248417153, + "learning_rate": 8.241758241758242e-08, + "loss": 1.1086, + "step": 20 + }, + { + "epoch": 0.0008665511265164644, + "grad_norm": 10.687486033822225, + "learning_rate": 8.653846153846154e-08, + "loss": 1.055, + "step": 
21 + }, + { + "epoch": 0.0009078154658743913, + "grad_norm": 7.432876174626529, + "learning_rate": 9.065934065934066e-08, + "loss": 1.0938, + "step": 22 + }, + { + "epoch": 0.0009490798052323182, + "grad_norm": 9.097692042455797, + "learning_rate": 9.478021978021978e-08, + "loss": 1.1165, + "step": 23 + }, + { + "epoch": 0.0009903441445902451, + "grad_norm": 17.36532737674231, + "learning_rate": 9.89010989010989e-08, + "loss": 1.2077, + "step": 24 + }, + { + "epoch": 0.001031608483948172, + "grad_norm": 8.811472304907632, + "learning_rate": 1.0302197802197804e-07, + "loss": 1.0974, + "step": 25 + }, + { + "epoch": 0.001072872823306099, + "grad_norm": 7.303206145941395, + "learning_rate": 1.0714285714285714e-07, + "loss": 1.1219, + "step": 26 + }, + { + "epoch": 0.0011141371626640258, + "grad_norm": 9.623561387761542, + "learning_rate": 1.1126373626373627e-07, + "loss": 1.1912, + "step": 27 + }, + { + "epoch": 0.0011554015020219526, + "grad_norm": 6.901574910722103, + "learning_rate": 1.1538461538461539e-07, + "loss": 1.0514, + "step": 28 + }, + { + "epoch": 0.0011966658413798796, + "grad_norm": 13.268023045200437, + "learning_rate": 1.195054945054945e-07, + "loss": 1.1101, + "step": 29 + }, + { + "epoch": 0.0012379301807378064, + "grad_norm": 8.863849084228493, + "learning_rate": 1.2362637362637364e-07, + "loss": 1.0746, + "step": 30 + }, + { + "epoch": 0.0012791945200957332, + "grad_norm": 8.80007357206639, + "learning_rate": 1.2774725274725276e-07, + "loss": 1.1211, + "step": 31 + }, + { + "epoch": 0.0013204588594536602, + "grad_norm": 8.613399162134447, + "learning_rate": 1.3186813186813187e-07, + "loss": 1.1222, + "step": 32 + }, + { + "epoch": 0.001361723198811587, + "grad_norm": 6.6693282807081875, + "learning_rate": 1.35989010989011e-07, + "loss": 1.075, + "step": 33 + }, + { + "epoch": 0.0014029875381695138, + "grad_norm": 8.329982954260739, + "learning_rate": 1.401098901098901e-07, + "loss": 1.0689, + "step": 34 + }, + { + "epoch": 0.0014442518775274408, + "grad_norm": 8.359084167961187, + "learning_rate": 1.4423076923076925e-07, + "loss": 1.111, + "step": 35 + }, + { + "epoch": 0.0014855162168853676, + "grad_norm": 8.564952770718858, + "learning_rate": 1.4835164835164835e-07, + "loss": 1.1225, + "step": 36 + }, + { + "epoch": 0.0015267805562432946, + "grad_norm": 7.581851832383527, + "learning_rate": 1.5247252747252746e-07, + "loss": 1.051, + "step": 37 + }, + { + "epoch": 0.0015680448956012214, + "grad_norm": 6.522339160470617, + "learning_rate": 1.565934065934066e-07, + "loss": 1.0087, + "step": 38 + }, + { + "epoch": 0.0016093092349591482, + "grad_norm": 14.265628807053035, + "learning_rate": 1.607142857142857e-07, + "loss": 1.1085, + "step": 39 + }, + { + "epoch": 0.0016505735743170752, + "grad_norm": 7.893943575800663, + "learning_rate": 1.6483516483516484e-07, + "loss": 1.0597, + "step": 40 + }, + { + "epoch": 0.001691837913675002, + "grad_norm": 30.81658106065068, + "learning_rate": 1.6895604395604396e-07, + "loss": 1.0482, + "step": 41 + }, + { + "epoch": 0.0017331022530329288, + "grad_norm": 10.104631169247536, + "learning_rate": 1.7307692307692308e-07, + "loss": 1.1493, + "step": 42 + }, + { + "epoch": 0.0017743665923908559, + "grad_norm": 9.072143972789409, + "learning_rate": 1.771978021978022e-07, + "loss": 1.0751, + "step": 43 + }, + { + "epoch": 0.0018156309317487827, + "grad_norm": 8.17197674100435, + "learning_rate": 1.8131868131868131e-07, + "loss": 1.0607, + "step": 44 + }, + { + "epoch": 0.0018568952711067097, + "grad_norm": 8.255790511515922, + 
"learning_rate": 1.8543956043956046e-07, + "loss": 1.128, + "step": 45 + }, + { + "epoch": 0.0018981596104646365, + "grad_norm": 16.125954268509677, + "learning_rate": 1.8956043956043955e-07, + "loss": 1.1206, + "step": 46 + }, + { + "epoch": 0.0019394239498225633, + "grad_norm": 9.672006886755911, + "learning_rate": 1.936813186813187e-07, + "loss": 1.0224, + "step": 47 + }, + { + "epoch": 0.0019806882891804903, + "grad_norm": 7.77158848990157, + "learning_rate": 1.978021978021978e-07, + "loss": 1.0883, + "step": 48 + }, + { + "epoch": 0.002021952628538417, + "grad_norm": 8.63743212409685, + "learning_rate": 2.0192307692307693e-07, + "loss": 1.0779, + "step": 49 + }, + { + "epoch": 0.002063216967896344, + "grad_norm": 8.028275424599453, + "learning_rate": 2.0604395604395607e-07, + "loss": 1.0225, + "step": 50 + }, + { + "epoch": 0.0021044813072542707, + "grad_norm": 6.364791791822092, + "learning_rate": 2.1016483516483517e-07, + "loss": 1.0166, + "step": 51 + }, + { + "epoch": 0.002145745646612198, + "grad_norm": 8.363788852486957, + "learning_rate": 2.1428571428571428e-07, + "loss": 1.0006, + "step": 52 + }, + { + "epoch": 0.0021870099859701247, + "grad_norm": 9.46776897698719, + "learning_rate": 2.1840659340659343e-07, + "loss": 1.0473, + "step": 53 + }, + { + "epoch": 0.0022282743253280515, + "grad_norm": 7.631662295890806, + "learning_rate": 2.2252747252747255e-07, + "loss": 1.0596, + "step": 54 + }, + { + "epoch": 0.0022695386646859783, + "grad_norm": 7.461736483403955, + "learning_rate": 2.2664835164835164e-07, + "loss": 1.0779, + "step": 55 + }, + { + "epoch": 0.002310803004043905, + "grad_norm": 6.957161811287934, + "learning_rate": 2.3076923076923078e-07, + "loss": 1.0969, + "step": 56 + }, + { + "epoch": 0.002352067343401832, + "grad_norm": 6.66390849210194, + "learning_rate": 2.348901098901099e-07, + "loss": 1.08, + "step": 57 + }, + { + "epoch": 0.002393331682759759, + "grad_norm": 8.235370387947949, + "learning_rate": 2.39010989010989e-07, + "loss": 1.0431, + "step": 58 + }, + { + "epoch": 0.002434596022117686, + "grad_norm": 11.318573334596614, + "learning_rate": 2.4313186813186813e-07, + "loss": 1.0437, + "step": 59 + }, + { + "epoch": 0.0024758603614756128, + "grad_norm": 6.679690970341524, + "learning_rate": 2.472527472527473e-07, + "loss": 1.0631, + "step": 60 + }, + { + "epoch": 0.0025171247008335396, + "grad_norm": 6.970612174700163, + "learning_rate": 2.5137362637362637e-07, + "loss": 1.0994, + "step": 61 + }, + { + "epoch": 0.0025583890401914664, + "grad_norm": 8.812239862330406, + "learning_rate": 2.554945054945055e-07, + "loss": 1.0519, + "step": 62 + }, + { + "epoch": 0.0025996533795493936, + "grad_norm": 8.715938954939148, + "learning_rate": 2.596153846153846e-07, + "loss": 1.1051, + "step": 63 + }, + { + "epoch": 0.0026409177189073204, + "grad_norm": 6.62434770144183, + "learning_rate": 2.6373626373626375e-07, + "loss": 1.0801, + "step": 64 + }, + { + "epoch": 0.002682182058265247, + "grad_norm": 8.262013084671855, + "learning_rate": 2.678571428571429e-07, + "loss": 1.0314, + "step": 65 + }, + { + "epoch": 0.002723446397623174, + "grad_norm": 20.012326888380887, + "learning_rate": 2.71978021978022e-07, + "loss": 1.0559, + "step": 66 + }, + { + "epoch": 0.002764710736981101, + "grad_norm": 5.39802159780868, + "learning_rate": 2.7609890109890113e-07, + "loss": 0.9389, + "step": 67 + }, + { + "epoch": 0.0028059750763390276, + "grad_norm": 5.885437554712918, + "learning_rate": 2.802197802197802e-07, + "loss": 0.9743, + "step": 68 + }, + { + "epoch": 
0.002847239415696955, + "grad_norm": 6.518362209004358, + "learning_rate": 2.843406593406593e-07, + "loss": 0.9736, + "step": 69 + }, + { + "epoch": 0.0028885037550548816, + "grad_norm": 6.208738974386278, + "learning_rate": 2.884615384615385e-07, + "loss": 1.0319, + "step": 70 + }, + { + "epoch": 0.0029297680944128084, + "grad_norm": 5.942235493064442, + "learning_rate": 2.925824175824176e-07, + "loss": 1.0178, + "step": 71 + }, + { + "epoch": 0.0029710324337707352, + "grad_norm": 6.45790117084395, + "learning_rate": 2.967032967032967e-07, + "loss": 1.0119, + "step": 72 + }, + { + "epoch": 0.003012296773128662, + "grad_norm": 4.920379133731763, + "learning_rate": 3.0082417582417584e-07, + "loss": 0.9631, + "step": 73 + }, + { + "epoch": 0.0030535611124865893, + "grad_norm": 218.7785637476485, + "learning_rate": 3.0494505494505493e-07, + "loss": 0.955, + "step": 74 + }, + { + "epoch": 0.003094825451844516, + "grad_norm": 5.583715917758413, + "learning_rate": 3.0906593406593407e-07, + "loss": 1.0481, + "step": 75 + }, + { + "epoch": 0.003136089791202443, + "grad_norm": 5.9121605490262565, + "learning_rate": 3.131868131868132e-07, + "loss": 1.0217, + "step": 76 + }, + { + "epoch": 0.0031773541305603697, + "grad_norm": 6.4174554407966715, + "learning_rate": 3.173076923076923e-07, + "loss": 0.9737, + "step": 77 + }, + { + "epoch": 0.0032186184699182965, + "grad_norm": 6.935339805286985, + "learning_rate": 3.214285714285714e-07, + "loss": 1.0426, + "step": 78 + }, + { + "epoch": 0.0032598828092762237, + "grad_norm": 5.447110200859827, + "learning_rate": 3.2554945054945054e-07, + "loss": 1.0404, + "step": 79 + }, + { + "epoch": 0.0033011471486341505, + "grad_norm": 5.990490392668337, + "learning_rate": 3.296703296703297e-07, + "loss": 0.9897, + "step": 80 + }, + { + "epoch": 0.0033424114879920773, + "grad_norm": 16.743487613203815, + "learning_rate": 3.337912087912088e-07, + "loss": 0.9905, + "step": 81 + }, + { + "epoch": 0.003383675827350004, + "grad_norm": 4.627391014831424, + "learning_rate": 3.379120879120879e-07, + "loss": 1.0067, + "step": 82 + }, + { + "epoch": 0.003424940166707931, + "grad_norm": 6.021206928306847, + "learning_rate": 3.42032967032967e-07, + "loss": 0.9933, + "step": 83 + }, + { + "epoch": 0.0034662045060658577, + "grad_norm": 10.603027785193373, + "learning_rate": 3.4615384615384616e-07, + "loss": 0.9639, + "step": 84 + }, + { + "epoch": 0.003507468845423785, + "grad_norm": 7.066132507782455, + "learning_rate": 3.502747252747253e-07, + "loss": 0.9697, + "step": 85 + }, + { + "epoch": 0.0035487331847817117, + "grad_norm": 7.083991741744826, + "learning_rate": 3.543956043956044e-07, + "loss": 1.0089, + "step": 86 + }, + { + "epoch": 0.0035899975241396385, + "grad_norm": 7.027310762429928, + "learning_rate": 3.5851648351648354e-07, + "loss": 0.9857, + "step": 87 + }, + { + "epoch": 0.0036312618634975653, + "grad_norm": 6.399220237565812, + "learning_rate": 3.6263736263736263e-07, + "loss": 0.9978, + "step": 88 + }, + { + "epoch": 0.003672526202855492, + "grad_norm": 5.133532967341583, + "learning_rate": 3.667582417582418e-07, + "loss": 0.9169, + "step": 89 + }, + { + "epoch": 0.0037137905422134194, + "grad_norm": 5.854821562538349, + "learning_rate": 3.708791208791209e-07, + "loss": 1.0101, + "step": 90 + }, + { + "epoch": 0.003755054881571346, + "grad_norm": 10.947157165249346, + "learning_rate": 3.75e-07, + "loss": 0.9866, + "step": 91 + }, + { + "epoch": 0.003796319220929273, + "grad_norm": 7.851196197051947, + "learning_rate": 3.791208791208791e-07, + "loss": 0.9322, + 
"step": 92 + }, + { + "epoch": 0.0038375835602871998, + "grad_norm": 17.30198411944494, + "learning_rate": 3.8324175824175825e-07, + "loss": 0.9806, + "step": 93 + }, + { + "epoch": 0.0038788478996451266, + "grad_norm": 4.968680716161868, + "learning_rate": 3.873626373626374e-07, + "loss": 0.8679, + "step": 94 + }, + { + "epoch": 0.003920112239003053, + "grad_norm": 4.725790737012779, + "learning_rate": 3.9148351648351653e-07, + "loss": 0.982, + "step": 95 + }, + { + "epoch": 0.003961376578360981, + "grad_norm": 6.304856446711222, + "learning_rate": 3.956043956043956e-07, + "loss": 0.9236, + "step": 96 + }, + { + "epoch": 0.004002640917718907, + "grad_norm": 6.282425359249271, + "learning_rate": 3.997252747252747e-07, + "loss": 0.9593, + "step": 97 + }, + { + "epoch": 0.004043905257076834, + "grad_norm": 6.9127987520248, + "learning_rate": 4.0384615384615386e-07, + "loss": 0.8815, + "step": 98 + }, + { + "epoch": 0.004085169596434761, + "grad_norm": 5.763594631186707, + "learning_rate": 4.0796703296703295e-07, + "loss": 0.8662, + "step": 99 + }, + { + "epoch": 0.004126433935792688, + "grad_norm": 4.685882907810937, + "learning_rate": 4.1208791208791215e-07, + "loss": 0.9111, + "step": 100 + }, + { + "epoch": 0.004167698275150615, + "grad_norm": 4.178431218296427, + "learning_rate": 4.1620879120879124e-07, + "loss": 0.8313, + "step": 101 + }, + { + "epoch": 0.004208962614508541, + "grad_norm": 6.9539187433734995, + "learning_rate": 4.2032967032967033e-07, + "loss": 0.9289, + "step": 102 + }, + { + "epoch": 0.004250226953866469, + "grad_norm": 4.389276104782548, + "learning_rate": 4.244505494505495e-07, + "loss": 0.9403, + "step": 103 + }, + { + "epoch": 0.004291491293224396, + "grad_norm": 12.267086560494288, + "learning_rate": 4.2857142857142857e-07, + "loss": 0.8955, + "step": 104 + }, + { + "epoch": 0.004332755632582322, + "grad_norm": 4.924037969874312, + "learning_rate": 4.3269230769230766e-07, + "loss": 0.9233, + "step": 105 + }, + { + "epoch": 0.0043740199719402495, + "grad_norm": 5.79094817599042, + "learning_rate": 4.3681318681318686e-07, + "loss": 0.8592, + "step": 106 + }, + { + "epoch": 0.004415284311298176, + "grad_norm": 5.6371985789129635, + "learning_rate": 4.4093406593406595e-07, + "loss": 0.9613, + "step": 107 + }, + { + "epoch": 0.004456548650656103, + "grad_norm": 5.8919323065561375, + "learning_rate": 4.450549450549451e-07, + "loss": 0.8526, + "step": 108 + }, + { + "epoch": 0.00449781299001403, + "grad_norm": 8.658096587580264, + "learning_rate": 4.491758241758242e-07, + "loss": 0.9038, + "step": 109 + }, + { + "epoch": 0.004539077329371957, + "grad_norm": 5.015448428334948, + "learning_rate": 4.532967032967033e-07, + "loss": 0.8888, + "step": 110 + }, + { + "epoch": 0.004580341668729884, + "grad_norm": 5.609834128429832, + "learning_rate": 4.5741758241758247e-07, + "loss": 0.9155, + "step": 111 + }, + { + "epoch": 0.00462160600808781, + "grad_norm": 3.9383127156319264, + "learning_rate": 4.6153846153846156e-07, + "loss": 0.8922, + "step": 112 + }, + { + "epoch": 0.0046628703474457375, + "grad_norm": 4.804537642639363, + "learning_rate": 4.656593406593407e-07, + "loss": 0.8749, + "step": 113 + }, + { + "epoch": 0.004704134686803664, + "grad_norm": 26.06427321671884, + "learning_rate": 4.697802197802198e-07, + "loss": 0.8302, + "step": 114 + }, + { + "epoch": 0.004745399026161591, + "grad_norm": 4.448337292248011, + "learning_rate": 4.739010989010989e-07, + "loss": 0.8945, + "step": 115 + }, + { + "epoch": 0.004786663365519518, + "grad_norm": 3.4532700196839956, + 
"learning_rate": 4.78021978021978e-07, + "loss": 0.8725, + "step": 116 + }, + { + "epoch": 0.004827927704877445, + "grad_norm": 4.305732463307678, + "learning_rate": 4.821428571428572e-07, + "loss": 0.8974, + "step": 117 + }, + { + "epoch": 0.004869192044235372, + "grad_norm": 4.073668922957326, + "learning_rate": 4.862637362637363e-07, + "loss": 0.8805, + "step": 118 + }, + { + "epoch": 0.004910456383593298, + "grad_norm": 12.879761522394181, + "learning_rate": 4.903846153846154e-07, + "loss": 0.8789, + "step": 119 + }, + { + "epoch": 0.0049517207229512255, + "grad_norm": 4.159631964423869, + "learning_rate": 4.945054945054946e-07, + "loss": 0.8881, + "step": 120 + }, + { + "epoch": 0.004992985062309153, + "grad_norm": 4.8260098560901685, + "learning_rate": 4.986263736263736e-07, + "loss": 0.8515, + "step": 121 + }, + { + "epoch": 0.005034249401667079, + "grad_norm": 7.546370478396311, + "learning_rate": 5.027472527472527e-07, + "loss": 0.909, + "step": 122 + }, + { + "epoch": 0.005075513741025006, + "grad_norm": 5.6836626372003405, + "learning_rate": 5.068681318681319e-07, + "loss": 0.8536, + "step": 123 + }, + { + "epoch": 0.005116778080382933, + "grad_norm": 4.18933778233775, + "learning_rate": 5.10989010989011e-07, + "loss": 0.9667, + "step": 124 + }, + { + "epoch": 0.00515804241974086, + "grad_norm": 4.01882326939688, + "learning_rate": 5.151098901098902e-07, + "loss": 0.8493, + "step": 125 + }, + { + "epoch": 0.005199306759098787, + "grad_norm": 5.366659865955302, + "learning_rate": 5.192307692307692e-07, + "loss": 0.8617, + "step": 126 + }, + { + "epoch": 0.0052405710984567136, + "grad_norm": 4.646638530339263, + "learning_rate": 5.233516483516484e-07, + "loss": 0.8649, + "step": 127 + }, + { + "epoch": 0.005281835437814641, + "grad_norm": 6.3710170108615545, + "learning_rate": 5.274725274725275e-07, + "loss": 0.8468, + "step": 128 + }, + { + "epoch": 0.005323099777172567, + "grad_norm": 3.598891229771939, + "learning_rate": 5.315934065934066e-07, + "loss": 0.893, + "step": 129 + }, + { + "epoch": 0.005364364116530494, + "grad_norm": 8.483139459383569, + "learning_rate": 5.357142857142858e-07, + "loss": 0.9466, + "step": 130 + }, + { + "epoch": 0.005405628455888422, + "grad_norm": 3.5038337827981225, + "learning_rate": 5.398351648351648e-07, + "loss": 0.8497, + "step": 131 + }, + { + "epoch": 0.005446892795246348, + "grad_norm": 10.000368168408091, + "learning_rate": 5.43956043956044e-07, + "loss": 0.8852, + "step": 132 + }, + { + "epoch": 0.005488157134604275, + "grad_norm": 11.365889709210334, + "learning_rate": 5.48076923076923e-07, + "loss": 0.907, + "step": 133 + }, + { + "epoch": 0.005529421473962202, + "grad_norm": 4.028536020042712, + "learning_rate": 5.521978021978023e-07, + "loss": 0.8312, + "step": 134 + }, + { + "epoch": 0.005570685813320129, + "grad_norm": 2.9804758589759, + "learning_rate": 5.563186813186814e-07, + "loss": 0.864, + "step": 135 + }, + { + "epoch": 0.005611950152678055, + "grad_norm": 3.9447706886612517, + "learning_rate": 5.604395604395604e-07, + "loss": 0.805, + "step": 136 + }, + { + "epoch": 0.005653214492035982, + "grad_norm": 11.87414465869481, + "learning_rate": 5.645604395604396e-07, + "loss": 0.8063, + "step": 137 + }, + { + "epoch": 0.00569447883139391, + "grad_norm": 22.450543541796367, + "learning_rate": 5.686813186813186e-07, + "loss": 0.8688, + "step": 138 + }, + { + "epoch": 0.005735743170751836, + "grad_norm": 5.672991132200073, + "learning_rate": 5.728021978021978e-07, + "loss": 0.8566, + "step": 139 + }, + { + "epoch": 
0.005777007510109763, + "grad_norm": 6.13431202678171, + "learning_rate": 5.76923076923077e-07, + "loss": 0.8284, + "step": 140 + }, + { + "epoch": 0.00581827184946769, + "grad_norm": 4.029203879590772, + "learning_rate": 5.810439560439561e-07, + "loss": 0.8062, + "step": 141 + }, + { + "epoch": 0.005859536188825617, + "grad_norm": 5.241273798778208, + "learning_rate": 5.851648351648352e-07, + "loss": 0.8831, + "step": 142 + }, + { + "epoch": 0.005900800528183544, + "grad_norm": 3.967813711022004, + "learning_rate": 5.892857142857142e-07, + "loss": 0.8062, + "step": 143 + }, + { + "epoch": 0.0059420648675414705, + "grad_norm": 8.376336252645043, + "learning_rate": 5.934065934065934e-07, + "loss": 0.8394, + "step": 144 + }, + { + "epoch": 0.005983329206899398, + "grad_norm": 7.07107355966253, + "learning_rate": 5.975274725274725e-07, + "loss": 0.9174, + "step": 145 + }, + { + "epoch": 0.006024593546257324, + "grad_norm": 3.8089901750363304, + "learning_rate": 6.016483516483517e-07, + "loss": 0.7843, + "step": 146 + }, + { + "epoch": 0.006065857885615251, + "grad_norm": 4.39918514591187, + "learning_rate": 6.057692307692308e-07, + "loss": 0.941, + "step": 147 + }, + { + "epoch": 0.0061071222249731785, + "grad_norm": 5.193131770447455, + "learning_rate": 6.098901098901099e-07, + "loss": 0.8383, + "step": 148 + }, + { + "epoch": 0.006148386564331105, + "grad_norm": 3.449021644158018, + "learning_rate": 6.14010989010989e-07, + "loss": 0.8724, + "step": 149 + }, + { + "epoch": 0.006189650903689032, + "grad_norm": 4.878635466886926, + "learning_rate": 6.181318681318681e-07, + "loss": 0.8116, + "step": 150 + }, + { + "epoch": 0.0062309152430469585, + "grad_norm": 3.4940365628157344, + "learning_rate": 6.222527472527473e-07, + "loss": 0.907, + "step": 151 + }, + { + "epoch": 0.006272179582404886, + "grad_norm": 6.236050092441801, + "learning_rate": 6.263736263736264e-07, + "loss": 0.7732, + "step": 152 + }, + { + "epoch": 0.006313443921762813, + "grad_norm": 11.829894726134496, + "learning_rate": 6.304945054945055e-07, + "loss": 0.7774, + "step": 153 + }, + { + "epoch": 0.006354708261120739, + "grad_norm": 3.3425569100297223, + "learning_rate": 6.346153846153846e-07, + "loss": 0.8181, + "step": 154 + }, + { + "epoch": 0.0063959726004786666, + "grad_norm": 4.995126611226978, + "learning_rate": 6.387362637362638e-07, + "loss": 0.9045, + "step": 155 + }, + { + "epoch": 0.006437236939836593, + "grad_norm": 4.962181148567067, + "learning_rate": 6.428571428571428e-07, + "loss": 0.787, + "step": 156 + }, + { + "epoch": 0.00647850127919452, + "grad_norm": 5.6787928786107535, + "learning_rate": 6.46978021978022e-07, + "loss": 0.8551, + "step": 157 + }, + { + "epoch": 0.006519765618552447, + "grad_norm": 4.880828558541638, + "learning_rate": 6.510989010989011e-07, + "loss": 0.8156, + "step": 158 + }, + { + "epoch": 0.006561029957910374, + "grad_norm": 5.574320944775972, + "learning_rate": 6.552197802197802e-07, + "loss": 0.7922, + "step": 159 + }, + { + "epoch": 0.006602294297268301, + "grad_norm": 8.867267345228555, + "learning_rate": 6.593406593406594e-07, + "loss": 0.8305, + "step": 160 + }, + { + "epoch": 0.006643558636626227, + "grad_norm": 118.87175756835373, + "learning_rate": 6.634615384615384e-07, + "loss": 0.8249, + "step": 161 + }, + { + "epoch": 0.006684822975984155, + "grad_norm": 3.837798578629879, + "learning_rate": 6.675824175824176e-07, + "loss": 0.8767, + "step": 162 + }, + { + "epoch": 0.006726087315342081, + "grad_norm": 3.0259908705036573, + "learning_rate": 6.717032967032967e-07, + 
"loss": 0.7935, + "step": 163 + }, + { + "epoch": 0.006767351654700008, + "grad_norm": 5.485739244489659, + "learning_rate": 6.758241758241758e-07, + "loss": 0.808, + "step": 164 + }, + { + "epoch": 0.006808615994057935, + "grad_norm": 3.086538085163352, + "learning_rate": 6.79945054945055e-07, + "loss": 0.7882, + "step": 165 + }, + { + "epoch": 0.006849880333415862, + "grad_norm": 4.392347428533502, + "learning_rate": 6.84065934065934e-07, + "loss": 0.7639, + "step": 166 + }, + { + "epoch": 0.006891144672773789, + "grad_norm": 4.617899783116438, + "learning_rate": 6.881868131868132e-07, + "loss": 0.8122, + "step": 167 + }, + { + "epoch": 0.006932409012131715, + "grad_norm": 4.077367550061966, + "learning_rate": 6.923076923076923e-07, + "loss": 0.8132, + "step": 168 + }, + { + "epoch": 0.006973673351489643, + "grad_norm": 4.216381920281421, + "learning_rate": 6.964285714285715e-07, + "loss": 0.7704, + "step": 169 + }, + { + "epoch": 0.00701493769084757, + "grad_norm": 6.037202696339174, + "learning_rate": 7.005494505494506e-07, + "loss": 0.8241, + "step": 170 + }, + { + "epoch": 0.007056202030205496, + "grad_norm": 3.6371888308876517, + "learning_rate": 7.046703296703296e-07, + "loss": 0.7879, + "step": 171 + }, + { + "epoch": 0.0070974663695634235, + "grad_norm": 3.8022033308405634, + "learning_rate": 7.087912087912088e-07, + "loss": 0.8121, + "step": 172 + }, + { + "epoch": 0.00713873070892135, + "grad_norm": 6.090966705427151, + "learning_rate": 7.129120879120879e-07, + "loss": 0.9032, + "step": 173 + }, + { + "epoch": 0.007179995048279277, + "grad_norm": 4.232390596015529, + "learning_rate": 7.170329670329671e-07, + "loss": 0.7485, + "step": 174 + }, + { + "epoch": 0.007221259387637204, + "grad_norm": 5.936856270113892, + "learning_rate": 7.211538461538462e-07, + "loss": 0.8233, + "step": 175 + }, + { + "epoch": 0.007262523726995131, + "grad_norm": 5.970000246312331, + "learning_rate": 7.252747252747253e-07, + "loss": 0.7424, + "step": 176 + }, + { + "epoch": 0.007303788066353058, + "grad_norm": 3.1776780505119935, + "learning_rate": 7.293956043956044e-07, + "loss": 0.7741, + "step": 177 + }, + { + "epoch": 0.007345052405710984, + "grad_norm": 6.710859216998897, + "learning_rate": 7.335164835164835e-07, + "loss": 0.8729, + "step": 178 + }, + { + "epoch": 0.0073863167450689115, + "grad_norm": 7.355837702024899, + "learning_rate": 7.376373626373626e-07, + "loss": 0.7954, + "step": 179 + }, + { + "epoch": 0.007427581084426839, + "grad_norm": 3.9285353440034125, + "learning_rate": 7.417582417582418e-07, + "loss": 0.8016, + "step": 180 + }, + { + "epoch": 0.007468845423784765, + "grad_norm": 4.0020075708877005, + "learning_rate": 7.458791208791209e-07, + "loss": 0.811, + "step": 181 + }, + { + "epoch": 0.007510109763142692, + "grad_norm": 4.9055243508335336, + "learning_rate": 7.5e-07, + "loss": 0.7389, + "step": 182 + }, + { + "epoch": 0.007551374102500619, + "grad_norm": 3.2873109837334384, + "learning_rate": 7.541208791208792e-07, + "loss": 0.7854, + "step": 183 + }, + { + "epoch": 0.007592638441858546, + "grad_norm": 4.8306474324226745, + "learning_rate": 7.582417582417582e-07, + "loss": 0.7514, + "step": 184 + }, + { + "epoch": 0.007633902781216473, + "grad_norm": 2.6898319591104194, + "learning_rate": 7.623626373626373e-07, + "loss": 0.7925, + "step": 185 + }, + { + "epoch": 0.0076751671205743995, + "grad_norm": 6.124672798489177, + "learning_rate": 7.664835164835165e-07, + "loss": 0.7961, + "step": 186 + }, + { + "epoch": 0.007716431459932327, + "grad_norm": 6.150800832039733, + 
"learning_rate": 7.706043956043955e-07, + "loss": 0.7727, + "step": 187 + }, + { + "epoch": 0.007757695799290253, + "grad_norm": 7.3561120133015745, + "learning_rate": 7.747252747252748e-07, + "loss": 0.7944, + "step": 188 + }, + { + "epoch": 0.00779896013864818, + "grad_norm": 6.597042069651635, + "learning_rate": 7.788461538461539e-07, + "loss": 0.8272, + "step": 189 + }, + { + "epoch": 0.007840224478006107, + "grad_norm": 3.824399484427559, + "learning_rate": 7.829670329670331e-07, + "loss": 0.7914, + "step": 190 + }, + { + "epoch": 0.007881488817364035, + "grad_norm": 3.2599158730343833, + "learning_rate": 7.870879120879121e-07, + "loss": 0.7621, + "step": 191 + }, + { + "epoch": 0.007922753156721961, + "grad_norm": 4.4952758767649454, + "learning_rate": 7.912087912087912e-07, + "loss": 0.8066, + "step": 192 + }, + { + "epoch": 0.007964017496079888, + "grad_norm": 3.151696919372039, + "learning_rate": 7.953296703296704e-07, + "loss": 0.7754, + "step": 193 + }, + { + "epoch": 0.008005281835437814, + "grad_norm": 5.661185034577049, + "learning_rate": 7.994505494505494e-07, + "loss": 0.8378, + "step": 194 + }, + { + "epoch": 0.008046546174795742, + "grad_norm": 5.748775983868368, + "learning_rate": 8.035714285714286e-07, + "loss": 0.766, + "step": 195 + }, + { + "epoch": 0.008087810514153668, + "grad_norm": 3.4455964461210757, + "learning_rate": 8.076923076923077e-07, + "loss": 0.7503, + "step": 196 + }, + { + "epoch": 0.008129074853511595, + "grad_norm": 9.37321229314045, + "learning_rate": 8.118131868131868e-07, + "loss": 0.7281, + "step": 197 + }, + { + "epoch": 0.008170339192869523, + "grad_norm": 4.573395249981507, + "learning_rate": 8.159340659340659e-07, + "loss": 0.7507, + "step": 198 + }, + { + "epoch": 0.00821160353222745, + "grad_norm": 2.625540126721013, + "learning_rate": 8.200549450549449e-07, + "loss": 0.7508, + "step": 199 + }, + { + "epoch": 0.008252867871585376, + "grad_norm": 3.911791948331881, + "learning_rate": 8.241758241758243e-07, + "loss": 0.839, + "step": 200 + }, + { + "epoch": 0.008294132210943302, + "grad_norm": 6.936122137806975, + "learning_rate": 8.282967032967033e-07, + "loss": 0.7876, + "step": 201 + }, + { + "epoch": 0.00833539655030123, + "grad_norm": 2.8034871810196687, + "learning_rate": 8.324175824175825e-07, + "loss": 0.75, + "step": 202 + }, + { + "epoch": 0.008376660889659156, + "grad_norm": 3.8561550613752176, + "learning_rate": 8.365384615384616e-07, + "loss": 0.8135, + "step": 203 + }, + { + "epoch": 0.008417925229017083, + "grad_norm": 3.2360828879672625, + "learning_rate": 8.406593406593407e-07, + "loss": 0.7325, + "step": 204 + }, + { + "epoch": 0.008459189568375011, + "grad_norm": 4.179340186579164, + "learning_rate": 8.447802197802198e-07, + "loss": 0.7894, + "step": 205 + }, + { + "epoch": 0.008500453907732937, + "grad_norm": 2.267843845848005, + "learning_rate": 8.48901098901099e-07, + "loss": 0.7259, + "step": 206 + }, + { + "epoch": 0.008541718247090864, + "grad_norm": 9.720880918817148, + "learning_rate": 8.53021978021978e-07, + "loss": 0.8459, + "step": 207 + }, + { + "epoch": 0.008582982586448792, + "grad_norm": 5.391551221690282, + "learning_rate": 8.571428571428571e-07, + "loss": 0.8044, + "step": 208 + }, + { + "epoch": 0.008624246925806718, + "grad_norm": 5.286257332826359, + "learning_rate": 8.612637362637362e-07, + "loss": 0.7768, + "step": 209 + }, + { + "epoch": 0.008665511265164644, + "grad_norm": 13.791181003048493, + "learning_rate": 8.653846153846153e-07, + "loss": 0.7311, + "step": 210 + }, + { + "epoch": 
0.00870677560452257, + "grad_norm": 5.287187413168146, + "learning_rate": 8.695054945054946e-07, + "loss": 0.753, + "step": 211 + }, + { + "epoch": 0.008748039943880499, + "grad_norm": 6.7138008245796295, + "learning_rate": 8.736263736263737e-07, + "loss": 0.7711, + "step": 212 + }, + { + "epoch": 0.008789304283238425, + "grad_norm": 3.9901836699423483, + "learning_rate": 8.777472527472529e-07, + "loss": 0.7631, + "step": 213 + }, + { + "epoch": 0.008830568622596352, + "grad_norm": 25.644417865162367, + "learning_rate": 8.818681318681319e-07, + "loss": 0.7032, + "step": 214 + }, + { + "epoch": 0.00887183296195428, + "grad_norm": 6.87594542998449, + "learning_rate": 8.85989010989011e-07, + "loss": 0.7978, + "step": 215 + }, + { + "epoch": 0.008913097301312206, + "grad_norm": 10.116046251334678, + "learning_rate": 8.901098901098902e-07, + "loss": 0.7743, + "step": 216 + }, + { + "epoch": 0.008954361640670132, + "grad_norm": 4.270040123682421, + "learning_rate": 8.942307692307692e-07, + "loss": 0.7489, + "step": 217 + }, + { + "epoch": 0.00899562598002806, + "grad_norm": 4.362590585741929, + "learning_rate": 8.983516483516484e-07, + "loss": 0.7827, + "step": 218 + }, + { + "epoch": 0.009036890319385987, + "grad_norm": 4.152152095047632, + "learning_rate": 9.024725274725274e-07, + "loss": 0.7677, + "step": 219 + }, + { + "epoch": 0.009078154658743913, + "grad_norm": 7.141162702046867, + "learning_rate": 9.065934065934065e-07, + "loss": 0.7241, + "step": 220 + }, + { + "epoch": 0.00911941899810184, + "grad_norm": 5.274491187947778, + "learning_rate": 9.107142857142857e-07, + "loss": 0.7549, + "step": 221 + }, + { + "epoch": 0.009160683337459768, + "grad_norm": 9.016615402940959, + "learning_rate": 9.148351648351649e-07, + "loss": 0.7932, + "step": 222 + }, + { + "epoch": 0.009201947676817694, + "grad_norm": 4.634673985524846, + "learning_rate": 9.189560439560441e-07, + "loss": 0.7493, + "step": 223 + }, + { + "epoch": 0.00924321201617562, + "grad_norm": 4.382261274909839, + "learning_rate": 9.230769230769231e-07, + "loss": 0.7489, + "step": 224 + }, + { + "epoch": 0.009284476355533549, + "grad_norm": 5.148254037632924, + "learning_rate": 9.271978021978023e-07, + "loss": 0.6489, + "step": 225 + }, + { + "epoch": 0.009325740694891475, + "grad_norm": 4.190010870115071, + "learning_rate": 9.313186813186814e-07, + "loss": 0.7126, + "step": 226 + }, + { + "epoch": 0.009367005034249401, + "grad_norm": 6.819951936760976, + "learning_rate": 9.354395604395605e-07, + "loss": 0.756, + "step": 227 + }, + { + "epoch": 0.009408269373607328, + "grad_norm": 4.642043251313882, + "learning_rate": 9.395604395604396e-07, + "loss": 0.773, + "step": 228 + }, + { + "epoch": 0.009449533712965256, + "grad_norm": 3.887742753266055, + "learning_rate": 9.436813186813186e-07, + "loss": 0.7218, + "step": 229 + }, + { + "epoch": 0.009490798052323182, + "grad_norm": 13.200280147153682, + "learning_rate": 9.478021978021978e-07, + "loss": 0.7492, + "step": 230 + }, + { + "epoch": 0.009532062391681109, + "grad_norm": 10.799972519050526, + "learning_rate": 9.519230769230769e-07, + "loss": 0.685, + "step": 231 + }, + { + "epoch": 0.009573326731039037, + "grad_norm": 3.679866497167092, + "learning_rate": 9.56043956043956e-07, + "loss": 0.8479, + "step": 232 + }, + { + "epoch": 0.009614591070396963, + "grad_norm": 2.756573565563366, + "learning_rate": 9.60164835164835e-07, + "loss": 0.7151, + "step": 233 + }, + { + "epoch": 0.00965585540975489, + "grad_norm": 3.5247166625245097, + "learning_rate": 9.642857142857145e-07, + "loss": 
0.8002, + "step": 234 + }, + { + "epoch": 0.009697119749112817, + "grad_norm": 5.136220396240947, + "learning_rate": 9.684065934065934e-07, + "loss": 0.7012, + "step": 235 + }, + { + "epoch": 0.009738384088470744, + "grad_norm": 4.217243617591909, + "learning_rate": 9.725274725274725e-07, + "loss": 0.8186, + "step": 236 + }, + { + "epoch": 0.00977964842782867, + "grad_norm": 3.5772160892165914, + "learning_rate": 9.766483516483517e-07, + "loss": 0.7189, + "step": 237 + }, + { + "epoch": 0.009820912767186597, + "grad_norm": 5.733892839037828, + "learning_rate": 9.807692307692308e-07, + "loss": 0.7302, + "step": 238 + }, + { + "epoch": 0.009862177106544525, + "grad_norm": 7.034413856304292, + "learning_rate": 9.8489010989011e-07, + "loss": 0.7504, + "step": 239 + }, + { + "epoch": 0.009903441445902451, + "grad_norm": 5.871597009967852, + "learning_rate": 9.890109890109891e-07, + "loss": 0.768, + "step": 240 + }, + { + "epoch": 0.009944705785260377, + "grad_norm": 3.251897285107038, + "learning_rate": 9.93131868131868e-07, + "loss": 0.7101, + "step": 241 + }, + { + "epoch": 0.009985970124618306, + "grad_norm": 3.979396149868531, + "learning_rate": 9.972527472527472e-07, + "loss": 0.7485, + "step": 242 + }, + { + "epoch": 0.010027234463976232, + "grad_norm": 3.2454985251220414, + "learning_rate": 1.0013736263736263e-06, + "loss": 0.7304, + "step": 243 + }, + { + "epoch": 0.010068498803334158, + "grad_norm": 4.661198344701955, + "learning_rate": 1.0054945054945055e-06, + "loss": 0.7622, + "step": 244 + }, + { + "epoch": 0.010109763142692086, + "grad_norm": 6.679486492871103, + "learning_rate": 1.0096153846153846e-06, + "loss": 0.7232, + "step": 245 + }, + { + "epoch": 0.010151027482050013, + "grad_norm": 3.899883837005097, + "learning_rate": 1.0137362637362638e-06, + "loss": 0.6873, + "step": 246 + }, + { + "epoch": 0.010192291821407939, + "grad_norm": 27.857775922636637, + "learning_rate": 1.017857142857143e-06, + "loss": 0.6397, + "step": 247 + }, + { + "epoch": 0.010233556160765865, + "grad_norm": 4.712646911324358, + "learning_rate": 1.021978021978022e-06, + "loss": 0.7323, + "step": 248 + }, + { + "epoch": 0.010274820500123794, + "grad_norm": 3.8244827473612086, + "learning_rate": 1.0260989010989012e-06, + "loss": 0.8122, + "step": 249 + }, + { + "epoch": 0.01031608483948172, + "grad_norm": 4.243742409454839, + "learning_rate": 1.0302197802197803e-06, + "loss": 0.7001, + "step": 250 + }, + { + "epoch": 0.010357349178839646, + "grad_norm": 5.260971565077228, + "learning_rate": 1.0343406593406593e-06, + "loss": 0.7832, + "step": 251 + }, + { + "epoch": 0.010398613518197574, + "grad_norm": 4.048992451232943, + "learning_rate": 1.0384615384615384e-06, + "loss": 0.7647, + "step": 252 + }, + { + "epoch": 0.0104398778575555, + "grad_norm": 4.82813582371373, + "learning_rate": 1.0425824175824176e-06, + "loss": 0.7858, + "step": 253 + }, + { + "epoch": 0.010481142196913427, + "grad_norm": 6.985854242440201, + "learning_rate": 1.0467032967032967e-06, + "loss": 0.6963, + "step": 254 + }, + { + "epoch": 0.010522406536271353, + "grad_norm": 5.926611192227958, + "learning_rate": 1.0508241758241759e-06, + "loss": 0.7607, + "step": 255 + }, + { + "epoch": 0.010563670875629282, + "grad_norm": 3.3878374540505223, + "learning_rate": 1.054945054945055e-06, + "loss": 0.6578, + "step": 256 + }, + { + "epoch": 0.010604935214987208, + "grad_norm": 3.3232439736523873, + "learning_rate": 1.0590659340659341e-06, + "loss": 0.8163, + "step": 257 + }, + { + "epoch": 0.010646199554345134, + "grad_norm": 
2.6510648470491742, + "learning_rate": 1.0631868131868133e-06, + "loss": 0.6404, + "step": 258 + }, + { + "epoch": 0.010687463893703062, + "grad_norm": 3.056750106088686, + "learning_rate": 1.0673076923076924e-06, + "loss": 0.6991, + "step": 259 + }, + { + "epoch": 0.010728728233060989, + "grad_norm": 3.6690613166862374, + "learning_rate": 1.0714285714285716e-06, + "loss": 0.6887, + "step": 260 + }, + { + "epoch": 0.010769992572418915, + "grad_norm": 3.5929435652574355, + "learning_rate": 1.0755494505494505e-06, + "loss": 0.7823, + "step": 261 + }, + { + "epoch": 0.010811256911776843, + "grad_norm": 8.990151421073453, + "learning_rate": 1.0796703296703297e-06, + "loss": 0.7547, + "step": 262 + }, + { + "epoch": 0.01085252125113477, + "grad_norm": 31.16492925545466, + "learning_rate": 1.0837912087912088e-06, + "loss": 0.7519, + "step": 263 + }, + { + "epoch": 0.010893785590492696, + "grad_norm": 5.882835952502649, + "learning_rate": 1.087912087912088e-06, + "loss": 0.6955, + "step": 264 + }, + { + "epoch": 0.010935049929850622, + "grad_norm": 4.0948239153897035, + "learning_rate": 1.092032967032967e-06, + "loss": 0.7143, + "step": 265 + }, + { + "epoch": 0.01097631426920855, + "grad_norm": 4.954807422211041, + "learning_rate": 1.096153846153846e-06, + "loss": 0.6941, + "step": 266 + }, + { + "epoch": 0.011017578608566477, + "grad_norm": 4.852687432130691, + "learning_rate": 1.1002747252747252e-06, + "loss": 0.691, + "step": 267 + }, + { + "epoch": 0.011058842947924403, + "grad_norm": 3.407747841986233, + "learning_rate": 1.1043956043956045e-06, + "loss": 0.6855, + "step": 268 + }, + { + "epoch": 0.011100107287282331, + "grad_norm": 32.98428144373742, + "learning_rate": 1.1085164835164837e-06, + "loss": 0.6802, + "step": 269 + }, + { + "epoch": 0.011141371626640258, + "grad_norm": 6.826774923504285, + "learning_rate": 1.1126373626373628e-06, + "loss": 0.6848, + "step": 270 + }, + { + "epoch": 0.011182635965998184, + "grad_norm": 4.0419739471513125, + "learning_rate": 1.1167582417582417e-06, + "loss": 0.7359, + "step": 271 + }, + { + "epoch": 0.01122390030535611, + "grad_norm": 7.3849473298516415, + "learning_rate": 1.1208791208791209e-06, + "loss": 0.723, + "step": 272 + }, + { + "epoch": 0.011265164644714038, + "grad_norm": 2.8311435265424976, + "learning_rate": 1.125e-06, + "loss": 0.76, + "step": 273 + }, + { + "epoch": 0.011306428984071965, + "grad_norm": 21.833069867190332, + "learning_rate": 1.1291208791208792e-06, + "loss": 0.7043, + "step": 274 + }, + { + "epoch": 0.011347693323429891, + "grad_norm": 4.337074885967037, + "learning_rate": 1.1332417582417583e-06, + "loss": 0.7336, + "step": 275 + }, + { + "epoch": 0.01138895766278782, + "grad_norm": 4.58546859318241, + "learning_rate": 1.1373626373626372e-06, + "loss": 0.7313, + "step": 276 + }, + { + "epoch": 0.011430222002145746, + "grad_norm": 7.81501387691681, + "learning_rate": 1.1414835164835164e-06, + "loss": 0.6518, + "step": 277 + }, + { + "epoch": 0.011471486341503672, + "grad_norm": 5.616095349331087, + "learning_rate": 1.1456043956043955e-06, + "loss": 0.7411, + "step": 278 + }, + { + "epoch": 0.0115127506808616, + "grad_norm": 5.840806835368438, + "learning_rate": 1.1497252747252749e-06, + "loss": 0.8306, + "step": 279 + }, + { + "epoch": 0.011554015020219527, + "grad_norm": 4.6329066471821685, + "learning_rate": 1.153846153846154e-06, + "loss": 0.7487, + "step": 280 + }, + { + "epoch": 0.011595279359577453, + "grad_norm": 4.280078888851114, + "learning_rate": 1.157967032967033e-06, + "loss": 0.7268, + "step": 281 + }, + { 
+ "epoch": 0.01163654369893538, + "grad_norm": 6.05404333711398, + "learning_rate": 1.1620879120879121e-06, + "loss": 0.7608, + "step": 282 + }, + { + "epoch": 0.011677808038293307, + "grad_norm": 10.854701218691211, + "learning_rate": 1.1662087912087913e-06, + "loss": 0.6834, + "step": 283 + }, + { + "epoch": 0.011719072377651234, + "grad_norm": 10.243375468234714, + "learning_rate": 1.1703296703296704e-06, + "loss": 0.7387, + "step": 284 + }, + { + "epoch": 0.01176033671700916, + "grad_norm": 3.418212667028618, + "learning_rate": 1.1744505494505495e-06, + "loss": 0.6587, + "step": 285 + }, + { + "epoch": 0.011801601056367088, + "grad_norm": 3.9317263502043103, + "learning_rate": 1.1785714285714285e-06, + "loss": 0.7412, + "step": 286 + }, + { + "epoch": 0.011842865395725015, + "grad_norm": 22.30014814595495, + "learning_rate": 1.1826923076923076e-06, + "loss": 0.6959, + "step": 287 + }, + { + "epoch": 0.011884129735082941, + "grad_norm": 16.616753911553293, + "learning_rate": 1.1868131868131868e-06, + "loss": 0.7162, + "step": 288 + }, + { + "epoch": 0.011925394074440869, + "grad_norm": 5.459062387361622, + "learning_rate": 1.190934065934066e-06, + "loss": 0.7588, + "step": 289 + }, + { + "epoch": 0.011966658413798795, + "grad_norm": 4.479773311942631, + "learning_rate": 1.195054945054945e-06, + "loss": 0.7287, + "step": 290 + }, + { + "epoch": 0.012007922753156722, + "grad_norm": 7.8543392747139045, + "learning_rate": 1.1991758241758242e-06, + "loss": 0.746, + "step": 291 + }, + { + "epoch": 0.012049187092514648, + "grad_norm": 4.57151507442665, + "learning_rate": 1.2032967032967033e-06, + "loss": 0.6687, + "step": 292 + }, + { + "epoch": 0.012090451431872576, + "grad_norm": 5.1150141833092935, + "learning_rate": 1.2074175824175825e-06, + "loss": 0.6789, + "step": 293 + }, + { + "epoch": 0.012131715771230503, + "grad_norm": 7.753613970283237, + "learning_rate": 1.2115384615384616e-06, + "loss": 0.7392, + "step": 294 + }, + { + "epoch": 0.012172980110588429, + "grad_norm": 2.574238565870184, + "learning_rate": 1.2156593406593408e-06, + "loss": 0.6931, + "step": 295 + }, + { + "epoch": 0.012214244449946357, + "grad_norm": 3.469502503790684, + "learning_rate": 1.2197802197802197e-06, + "loss": 0.6972, + "step": 296 + }, + { + "epoch": 0.012255508789304283, + "grad_norm": 4.812163872451661, + "learning_rate": 1.2239010989010989e-06, + "loss": 0.6931, + "step": 297 + }, + { + "epoch": 0.01229677312866221, + "grad_norm": 4.478357774185953, + "learning_rate": 1.228021978021978e-06, + "loss": 0.7001, + "step": 298 + }, + { + "epoch": 0.012338037468020136, + "grad_norm": 3.977446333237777, + "learning_rate": 1.2321428571428571e-06, + "loss": 0.7099, + "step": 299 + }, + { + "epoch": 0.012379301807378064, + "grad_norm": 13.07860861203165, + "learning_rate": 1.2362637362637363e-06, + "loss": 0.7215, + "step": 300 + }, + { + "epoch": 0.01242056614673599, + "grad_norm": 3.1236708270661455, + "learning_rate": 1.2403846153846154e-06, + "loss": 0.6993, + "step": 301 + }, + { + "epoch": 0.012461830486093917, + "grad_norm": 3.9073451219207582, + "learning_rate": 1.2445054945054946e-06, + "loss": 0.6774, + "step": 302 + }, + { + "epoch": 0.012503094825451845, + "grad_norm": 15.054240222222154, + "learning_rate": 1.2486263736263737e-06, + "loss": 0.7103, + "step": 303 + }, + { + "epoch": 0.012544359164809771, + "grad_norm": 5.742944659181562, + "learning_rate": 1.2527472527472529e-06, + "loss": 0.6875, + "step": 304 + }, + { + "epoch": 0.012585623504167698, + "grad_norm": 4.295282063369656, + 
"learning_rate": 1.256868131868132e-06, + "loss": 0.6814, + "step": 305 + }, + { + "epoch": 0.012626887843525626, + "grad_norm": 6.664358017343417, + "learning_rate": 1.260989010989011e-06, + "loss": 0.743, + "step": 306 + }, + { + "epoch": 0.012668152182883552, + "grad_norm": 3.841053411879456, + "learning_rate": 1.26510989010989e-06, + "loss": 0.6849, + "step": 307 + }, + { + "epoch": 0.012709416522241479, + "grad_norm": 7.301114756677183, + "learning_rate": 1.2692307692307692e-06, + "loss": 0.7355, + "step": 308 + }, + { + "epoch": 0.012750680861599405, + "grad_norm": 4.418524390615772, + "learning_rate": 1.2733516483516484e-06, + "loss": 0.7049, + "step": 309 + }, + { + "epoch": 0.012791945200957333, + "grad_norm": 4.5581748962811375, + "learning_rate": 1.2774725274725275e-06, + "loss": 0.7193, + "step": 310 + }, + { + "epoch": 0.01283320954031526, + "grad_norm": 7.552849585286646, + "learning_rate": 1.2815934065934067e-06, + "loss": 0.7403, + "step": 311 + }, + { + "epoch": 0.012874473879673186, + "grad_norm": 3.80233196227529, + "learning_rate": 1.2857142857142856e-06, + "loss": 0.7284, + "step": 312 + }, + { + "epoch": 0.012915738219031114, + "grad_norm": 26.838705269695435, + "learning_rate": 1.289835164835165e-06, + "loss": 0.6797, + "step": 313 + }, + { + "epoch": 0.01295700255838904, + "grad_norm": 4.347218743068407, + "learning_rate": 1.293956043956044e-06, + "loss": 0.773, + "step": 314 + }, + { + "epoch": 0.012998266897746967, + "grad_norm": 3.8075384946745734, + "learning_rate": 1.2980769230769232e-06, + "loss": 0.6757, + "step": 315 + }, + { + "epoch": 0.013039531237104895, + "grad_norm": 4.246612072267864, + "learning_rate": 1.3021978021978022e-06, + "loss": 0.7312, + "step": 316 + }, + { + "epoch": 0.013080795576462821, + "grad_norm": 6.673677017584647, + "learning_rate": 1.3063186813186813e-06, + "loss": 0.6367, + "step": 317 + }, + { + "epoch": 0.013122059915820748, + "grad_norm": 3.146653200519079, + "learning_rate": 1.3104395604395605e-06, + "loss": 0.6798, + "step": 318 + }, + { + "epoch": 0.013163324255178674, + "grad_norm": 4.859463683110573, + "learning_rate": 1.3145604395604396e-06, + "loss": 0.6625, + "step": 319 + }, + { + "epoch": 0.013204588594536602, + "grad_norm": 4.70996461455086, + "learning_rate": 1.3186813186813187e-06, + "loss": 0.7398, + "step": 320 + }, + { + "epoch": 0.013245852933894528, + "grad_norm": 3.5416294199774354, + "learning_rate": 1.3228021978021979e-06, + "loss": 0.6492, + "step": 321 + }, + { + "epoch": 0.013287117273252455, + "grad_norm": 4.943336777776729, + "learning_rate": 1.3269230769230768e-06, + "loss": 0.6898, + "step": 322 + }, + { + "epoch": 0.013328381612610383, + "grad_norm": 3.964345248523024, + "learning_rate": 1.331043956043956e-06, + "loss": 0.6823, + "step": 323 + }, + { + "epoch": 0.01336964595196831, + "grad_norm": 4.125098650084872, + "learning_rate": 1.3351648351648351e-06, + "loss": 0.7052, + "step": 324 + }, + { + "epoch": 0.013410910291326236, + "grad_norm": 8.140125068041996, + "learning_rate": 1.3392857142857145e-06, + "loss": 0.7142, + "step": 325 + }, + { + "epoch": 0.013452174630684162, + "grad_norm": 5.359401132320518, + "learning_rate": 1.3434065934065934e-06, + "loss": 0.7482, + "step": 326 + }, + { + "epoch": 0.01349343897004209, + "grad_norm": 4.8662650071887805, + "learning_rate": 1.3475274725274725e-06, + "loss": 0.6719, + "step": 327 + }, + { + "epoch": 0.013534703309400016, + "grad_norm": 4.58370348027786, + "learning_rate": 1.3516483516483517e-06, + "loss": 0.6566, + "step": 328 + }, + { + "epoch": 
0.013575967648757943, + "grad_norm": 14.252479895304948, + "learning_rate": 1.3557692307692308e-06, + "loss": 0.7387, + "step": 329 + }, + { + "epoch": 0.01361723198811587, + "grad_norm": 12.181391840440092, + "learning_rate": 1.35989010989011e-06, + "loss": 0.7105, + "step": 330 + }, + { + "epoch": 0.013658496327473797, + "grad_norm": 4.575326812801101, + "learning_rate": 1.3640109890109891e-06, + "loss": 0.6624, + "step": 331 + }, + { + "epoch": 0.013699760666831724, + "grad_norm": 5.257281633433936, + "learning_rate": 1.368131868131868e-06, + "loss": 0.7595, + "step": 332 + }, + { + "epoch": 0.013741025006189652, + "grad_norm": 5.253446735679375, + "learning_rate": 1.3722527472527472e-06, + "loss": 0.6169, + "step": 333 + }, + { + "epoch": 0.013782289345547578, + "grad_norm": 5.0676199546064415, + "learning_rate": 1.3763736263736263e-06, + "loss": 0.7036, + "step": 334 + }, + { + "epoch": 0.013823553684905504, + "grad_norm": 5.103751259267002, + "learning_rate": 1.3804945054945055e-06, + "loss": 0.6642, + "step": 335 + }, + { + "epoch": 0.01386481802426343, + "grad_norm": 6.499830903202336, + "learning_rate": 1.3846153846153846e-06, + "loss": 0.7306, + "step": 336 + }, + { + "epoch": 0.013906082363621359, + "grad_norm": 4.730916728197067, + "learning_rate": 1.3887362637362638e-06, + "loss": 0.6681, + "step": 337 + }, + { + "epoch": 0.013947346702979285, + "grad_norm": 6.434673847022691, + "learning_rate": 1.392857142857143e-06, + "loss": 0.6262, + "step": 338 + }, + { + "epoch": 0.013988611042337212, + "grad_norm": 6.419005646215202, + "learning_rate": 1.396978021978022e-06, + "loss": 0.7095, + "step": 339 + }, + { + "epoch": 0.01402987538169514, + "grad_norm": 5.70109012802996, + "learning_rate": 1.4010989010989012e-06, + "loss": 0.656, + "step": 340 + }, + { + "epoch": 0.014071139721053066, + "grad_norm": 4.425332986821944, + "learning_rate": 1.4052197802197804e-06, + "loss": 0.7329, + "step": 341 + }, + { + "epoch": 0.014112404060410992, + "grad_norm": 5.428227901025103, + "learning_rate": 1.4093406593406593e-06, + "loss": 0.6567, + "step": 342 + }, + { + "epoch": 0.01415366839976892, + "grad_norm": 3.3138868169097258, + "learning_rate": 1.4134615384615384e-06, + "loss": 0.6293, + "step": 343 + }, + { + "epoch": 0.014194932739126847, + "grad_norm": 6.798359721609697, + "learning_rate": 1.4175824175824176e-06, + "loss": 0.6929, + "step": 344 + }, + { + "epoch": 0.014236197078484773, + "grad_norm": 4.756771141421686, + "learning_rate": 1.4217032967032967e-06, + "loss": 0.7637, + "step": 345 + }, + { + "epoch": 0.0142774614178427, + "grad_norm": 4.472716843921156, + "learning_rate": 1.4258241758241759e-06, + "loss": 0.7005, + "step": 346 + }, + { + "epoch": 0.014318725757200628, + "grad_norm": 12.806931895251607, + "learning_rate": 1.429945054945055e-06, + "loss": 0.6367, + "step": 347 + }, + { + "epoch": 0.014359990096558554, + "grad_norm": 3.809016141459815, + "learning_rate": 1.4340659340659342e-06, + "loss": 0.627, + "step": 348 + }, + { + "epoch": 0.01440125443591648, + "grad_norm": 6.270949786565847, + "learning_rate": 1.4381868131868133e-06, + "loss": 0.6806, + "step": 349 + }, + { + "epoch": 0.014442518775274409, + "grad_norm": 2.913208445415563, + "learning_rate": 1.4423076923076924e-06, + "loss": 0.6381, + "step": 350 + }, + { + "epoch": 0.014483783114632335, + "grad_norm": 3.528068808586537, + "learning_rate": 1.4464285714285716e-06, + "loss": 0.6945, + "step": 351 + }, + { + "epoch": 0.014525047453990261, + "grad_norm": 4.159069655285337, + "learning_rate": 
1.4505494505494505e-06, + "loss": 0.6742, + "step": 352 + }, + { + "epoch": 0.014566311793348188, + "grad_norm": 4.00442373707568, + "learning_rate": 1.4546703296703297e-06, + "loss": 0.6836, + "step": 353 + }, + { + "epoch": 0.014607576132706116, + "grad_norm": 4.174339791986612, + "learning_rate": 1.4587912087912088e-06, + "loss": 0.6838, + "step": 354 + }, + { + "epoch": 0.014648840472064042, + "grad_norm": 4.8642943244554635, + "learning_rate": 1.462912087912088e-06, + "loss": 0.6613, + "step": 355 + }, + { + "epoch": 0.014690104811421969, + "grad_norm": 3.9700495845618127, + "learning_rate": 1.467032967032967e-06, + "loss": 0.6654, + "step": 356 + }, + { + "epoch": 0.014731369150779897, + "grad_norm": 2.961135579612071, + "learning_rate": 1.471153846153846e-06, + "loss": 0.7001, + "step": 357 + }, + { + "epoch": 0.014772633490137823, + "grad_norm": 7.124774667670057, + "learning_rate": 1.4752747252747252e-06, + "loss": 0.6208, + "step": 358 + }, + { + "epoch": 0.01481389782949575, + "grad_norm": 19.013088210854896, + "learning_rate": 1.4793956043956045e-06, + "loss": 0.723, + "step": 359 + }, + { + "epoch": 0.014855162168853677, + "grad_norm": 4.711747145537383, + "learning_rate": 1.4835164835164837e-06, + "loss": 0.6478, + "step": 360 + }, + { + "epoch": 0.014896426508211604, + "grad_norm": 9.43521272037813, + "learning_rate": 1.4876373626373628e-06, + "loss": 0.7006, + "step": 361 + }, + { + "epoch": 0.01493769084756953, + "grad_norm": 5.783270010934842, + "learning_rate": 1.4917582417582417e-06, + "loss": 0.6836, + "step": 362 + }, + { + "epoch": 0.014978955186927457, + "grad_norm": 17.57803414071563, + "learning_rate": 1.4958791208791209e-06, + "loss": 0.7151, + "step": 363 + }, + { + "epoch": 0.015020219526285385, + "grad_norm": 3.283051918359883, + "learning_rate": 1.5e-06, + "loss": 0.7261, + "step": 364 + }, + { + "epoch": 0.015061483865643311, + "grad_norm": 3.564009132719922, + "learning_rate": 1.5041208791208792e-06, + "loss": 0.7384, + "step": 365 + }, + { + "epoch": 0.015102748205001237, + "grad_norm": 4.87677926792507, + "learning_rate": 1.5082417582417583e-06, + "loss": 0.7062, + "step": 366 + }, + { + "epoch": 0.015144012544359165, + "grad_norm": 6.454650147941147, + "learning_rate": 1.5123626373626373e-06, + "loss": 0.7022, + "step": 367 + }, + { + "epoch": 0.015185276883717092, + "grad_norm": 13.349772535545895, + "learning_rate": 1.5164835164835164e-06, + "loss": 0.6852, + "step": 368 + }, + { + "epoch": 0.015226541223075018, + "grad_norm": 74.50245911804883, + "learning_rate": 1.5206043956043955e-06, + "loss": 0.6394, + "step": 369 + }, + { + "epoch": 0.015267805562432946, + "grad_norm": 3.901477179778363, + "learning_rate": 1.5247252747252747e-06, + "loss": 0.6132, + "step": 370 + }, + { + "epoch": 0.015309069901790873, + "grad_norm": 4.32166337562483, + "learning_rate": 1.5288461538461538e-06, + "loss": 0.7045, + "step": 371 + }, + { + "epoch": 0.015350334241148799, + "grad_norm": 6.538597609109458, + "learning_rate": 1.532967032967033e-06, + "loss": 0.6839, + "step": 372 + }, + { + "epoch": 0.015391598580506725, + "grad_norm": 4.005241235433792, + "learning_rate": 1.537087912087912e-06, + "loss": 0.6723, + "step": 373 + }, + { + "epoch": 0.015432862919864654, + "grad_norm": 6.698199427643886, + "learning_rate": 1.541208791208791e-06, + "loss": 0.6642, + "step": 374 + }, + { + "epoch": 0.01547412725922258, + "grad_norm": 3.2046191829841058, + "learning_rate": 1.5453296703296702e-06, + "loss": 0.716, + "step": 375 + }, + { + "epoch": 0.015515391598580506, + 
"grad_norm": 3.5412518769024115, + "learning_rate": 1.5494505494505496e-06, + "loss": 0.7016, + "step": 376 + }, + { + "epoch": 0.015556655937938434, + "grad_norm": 10.028828126483832, + "learning_rate": 1.5535714285714287e-06, + "loss": 0.6811, + "step": 377 + }, + { + "epoch": 0.01559792027729636, + "grad_norm": 4.483415749807318, + "learning_rate": 1.5576923076923078e-06, + "loss": 0.7301, + "step": 378 + }, + { + "epoch": 0.015639184616654287, + "grad_norm": 3.134588061291489, + "learning_rate": 1.561813186813187e-06, + "loss": 0.6882, + "step": 379 + }, + { + "epoch": 0.015680448956012213, + "grad_norm": 8.908361766607825, + "learning_rate": 1.5659340659340661e-06, + "loss": 0.642, + "step": 380 + }, + { + "epoch": 0.01572171329537014, + "grad_norm": 4.710971182942258, + "learning_rate": 1.5700549450549453e-06, + "loss": 0.6636, + "step": 381 + }, + { + "epoch": 0.01576297763472807, + "grad_norm": 3.17141878281581, + "learning_rate": 1.5741758241758242e-06, + "loss": 0.6798, + "step": 382 + }, + { + "epoch": 0.015804241974085996, + "grad_norm": 4.258596027530967, + "learning_rate": 1.5782967032967034e-06, + "loss": 0.6738, + "step": 383 + }, + { + "epoch": 0.015845506313443922, + "grad_norm": 3.650964843232771, + "learning_rate": 1.5824175824175825e-06, + "loss": 0.6315, + "step": 384 + }, + { + "epoch": 0.01588677065280185, + "grad_norm": 5.705349710651111, + "learning_rate": 1.5865384615384616e-06, + "loss": 0.6505, + "step": 385 + }, + { + "epoch": 0.015928034992159775, + "grad_norm": 3.937889636494073, + "learning_rate": 1.5906593406593408e-06, + "loss": 0.7268, + "step": 386 + }, + { + "epoch": 0.0159692993315177, + "grad_norm": 20.439464871295357, + "learning_rate": 1.5947802197802197e-06, + "loss": 0.662, + "step": 387 + }, + { + "epoch": 0.016010563670875628, + "grad_norm": 4.448815912550065, + "learning_rate": 1.5989010989010989e-06, + "loss": 0.7051, + "step": 388 + }, + { + "epoch": 0.016051828010233558, + "grad_norm": 5.4219329431230125, + "learning_rate": 1.603021978021978e-06, + "loss": 0.7964, + "step": 389 + }, + { + "epoch": 0.016093092349591484, + "grad_norm": 4.938388256336878, + "learning_rate": 1.6071428571428572e-06, + "loss": 0.695, + "step": 390 + }, + { + "epoch": 0.01613435668894941, + "grad_norm": 3.626342467656575, + "learning_rate": 1.6112637362637363e-06, + "loss": 0.6415, + "step": 391 + }, + { + "epoch": 0.016175621028307337, + "grad_norm": 10.514473057401602, + "learning_rate": 1.6153846153846154e-06, + "loss": 0.7118, + "step": 392 + }, + { + "epoch": 0.016216885367665263, + "grad_norm": 4.960858484638559, + "learning_rate": 1.6195054945054944e-06, + "loss": 0.6857, + "step": 393 + }, + { + "epoch": 0.01625814970702319, + "grad_norm": 3.154688735654521, + "learning_rate": 1.6236263736263735e-06, + "loss": 0.7184, + "step": 394 + }, + { + "epoch": 0.016299414046381116, + "grad_norm": 3.4118266128678436, + "learning_rate": 1.6277472527472527e-06, + "loss": 0.6287, + "step": 395 + }, + { + "epoch": 0.016340678385739046, + "grad_norm": 7.195328888221302, + "learning_rate": 1.6318681318681318e-06, + "loss": 0.7005, + "step": 396 + }, + { + "epoch": 0.016381942725096972, + "grad_norm": 3.344507489523249, + "learning_rate": 1.635989010989011e-06, + "loss": 0.7157, + "step": 397 + }, + { + "epoch": 0.0164232070644549, + "grad_norm": 4.428500164147598, + "learning_rate": 1.6401098901098899e-06, + "loss": 0.6518, + "step": 398 + }, + { + "epoch": 0.016464471403812825, + "grad_norm": 3.9480104953543336, + "learning_rate": 1.6442307692307695e-06, + "loss": 
0.6721, + "step": 399 + }, + { + "epoch": 0.01650573574317075, + "grad_norm": 4.46341190552485, + "learning_rate": 1.6483516483516486e-06, + "loss": 0.5934, + "step": 400 + }, + { + "epoch": 0.016547000082528678, + "grad_norm": 4.467528226697316, + "learning_rate": 1.6524725274725277e-06, + "loss": 0.6976, + "step": 401 + }, + { + "epoch": 0.016588264421886604, + "grad_norm": 5.698985946663071, + "learning_rate": 1.6565934065934067e-06, + "loss": 0.7095, + "step": 402 + }, + { + "epoch": 0.016629528761244534, + "grad_norm": 5.258514458841521, + "learning_rate": 1.6607142857142858e-06, + "loss": 0.7073, + "step": 403 + }, + { + "epoch": 0.01667079310060246, + "grad_norm": 10.457109120188644, + "learning_rate": 1.664835164835165e-06, + "loss": 0.6659, + "step": 404 + }, + { + "epoch": 0.016712057439960386, + "grad_norm": 5.108742352813702, + "learning_rate": 1.668956043956044e-06, + "loss": 0.6594, + "step": 405 + }, + { + "epoch": 0.016753321779318313, + "grad_norm": 5.097321034774005, + "learning_rate": 1.6730769230769232e-06, + "loss": 0.6652, + "step": 406 + }, + { + "epoch": 0.01679458611867624, + "grad_norm": 5.225402620582436, + "learning_rate": 1.6771978021978022e-06, + "loss": 0.6746, + "step": 407 + }, + { + "epoch": 0.016835850458034166, + "grad_norm": 4.629379338712925, + "learning_rate": 1.6813186813186813e-06, + "loss": 0.6527, + "step": 408 + }, + { + "epoch": 0.016877114797392095, + "grad_norm": 3.9018051618188516, + "learning_rate": 1.6854395604395605e-06, + "loss": 0.6377, + "step": 409 + }, + { + "epoch": 0.016918379136750022, + "grad_norm": 2.859235448490461, + "learning_rate": 1.6895604395604396e-06, + "loss": 0.6926, + "step": 410 + }, + { + "epoch": 0.016959643476107948, + "grad_norm": 3.662509232475961, + "learning_rate": 1.6936813186813188e-06, + "loss": 0.667, + "step": 411 + }, + { + "epoch": 0.017000907815465875, + "grad_norm": 6.714210296754268, + "learning_rate": 1.697802197802198e-06, + "loss": 0.6937, + "step": 412 + }, + { + "epoch": 0.0170421721548238, + "grad_norm": 4.82367762079917, + "learning_rate": 1.7019230769230768e-06, + "loss": 0.6385, + "step": 413 + }, + { + "epoch": 0.017083436494181727, + "grad_norm": 3.8894757494414356, + "learning_rate": 1.706043956043956e-06, + "loss": 0.7879, + "step": 414 + }, + { + "epoch": 0.017124700833539654, + "grad_norm": 5.157592209368292, + "learning_rate": 1.7101648351648351e-06, + "loss": 0.7208, + "step": 415 + }, + { + "epoch": 0.017165965172897583, + "grad_norm": 6.99345498859089, + "learning_rate": 1.7142857142857143e-06, + "loss": 0.6958, + "step": 416 + }, + { + "epoch": 0.01720722951225551, + "grad_norm": 5.803998048558785, + "learning_rate": 1.7184065934065934e-06, + "loss": 0.6405, + "step": 417 + }, + { + "epoch": 0.017248493851613436, + "grad_norm": 3.440834557092815, + "learning_rate": 1.7225274725274723e-06, + "loss": 0.6798, + "step": 418 + }, + { + "epoch": 0.017289758190971363, + "grad_norm": 4.9956535880857045, + "learning_rate": 1.7266483516483515e-06, + "loss": 0.6574, + "step": 419 + }, + { + "epoch": 0.01733102253032929, + "grad_norm": 5.508364530578417, + "learning_rate": 1.7307692307692306e-06, + "loss": 0.7136, + "step": 420 + }, + { + "epoch": 0.017372286869687215, + "grad_norm": 4.613150974388976, + "learning_rate": 1.7348901098901102e-06, + "loss": 0.6475, + "step": 421 + }, + { + "epoch": 0.01741355120904514, + "grad_norm": 5.205351254676049, + "learning_rate": 1.7390109890109891e-06, + "loss": 0.6144, + "step": 422 + }, + { + "epoch": 0.01745481554840307, + "grad_norm": 
5.7673197997927765, + "learning_rate": 1.7431318681318683e-06, + "loss": 0.6682, + "step": 423 + }, + { + "epoch": 0.017496079887760998, + "grad_norm": 4.285922507628434, + "learning_rate": 1.7472527472527474e-06, + "loss": 0.6908, + "step": 424 + }, + { + "epoch": 0.017537344227118924, + "grad_norm": 2.628831481554016, + "learning_rate": 1.7513736263736266e-06, + "loss": 0.6456, + "step": 425 + }, + { + "epoch": 0.01757860856647685, + "grad_norm": 3.988019699281374, + "learning_rate": 1.7554945054945057e-06, + "loss": 0.6774, + "step": 426 + }, + { + "epoch": 0.017619872905834777, + "grad_norm": 4.509416838064583, + "learning_rate": 1.7596153846153846e-06, + "loss": 0.6996, + "step": 427 + }, + { + "epoch": 0.017661137245192703, + "grad_norm": 4.661315759557038, + "learning_rate": 1.7637362637362638e-06, + "loss": 0.6079, + "step": 428 + }, + { + "epoch": 0.01770240158455063, + "grad_norm": 6.90475507629846, + "learning_rate": 1.767857142857143e-06, + "loss": 0.6741, + "step": 429 + }, + { + "epoch": 0.01774366592390856, + "grad_norm": 3.7157496475231757, + "learning_rate": 1.771978021978022e-06, + "loss": 0.6726, + "step": 430 + }, + { + "epoch": 0.017784930263266486, + "grad_norm": 2.167812361250473, + "learning_rate": 1.7760989010989012e-06, + "loss": 0.631, + "step": 431 + }, + { + "epoch": 0.017826194602624412, + "grad_norm": 3.1591045408144733, + "learning_rate": 1.7802197802197804e-06, + "loss": 0.6241, + "step": 432 + }, + { + "epoch": 0.01786745894198234, + "grad_norm": 4.14961924178319, + "learning_rate": 1.7843406593406593e-06, + "loss": 0.7066, + "step": 433 + }, + { + "epoch": 0.017908723281340265, + "grad_norm": 3.8443741426261404, + "learning_rate": 1.7884615384615384e-06, + "loss": 0.6772, + "step": 434 + }, + { + "epoch": 0.01794998762069819, + "grad_norm": 4.359229517863907, + "learning_rate": 1.7925824175824176e-06, + "loss": 0.6364, + "step": 435 + }, + { + "epoch": 0.01799125196005612, + "grad_norm": 3.234683686512525, + "learning_rate": 1.7967032967032967e-06, + "loss": 0.6761, + "step": 436 + }, + { + "epoch": 0.018032516299414048, + "grad_norm": 16.760228610223027, + "learning_rate": 1.8008241758241759e-06, + "loss": 0.6451, + "step": 437 + }, + { + "epoch": 0.018073780638771974, + "grad_norm": 5.086328733715403, + "learning_rate": 1.8049450549450548e-06, + "loss": 0.6742, + "step": 438 + }, + { + "epoch": 0.0181150449781299, + "grad_norm": 3.049865345547356, + "learning_rate": 1.809065934065934e-06, + "loss": 0.6515, + "step": 439 + }, + { + "epoch": 0.018156309317487827, + "grad_norm": 6.663263467262178, + "learning_rate": 1.813186813186813e-06, + "loss": 0.6774, + "step": 440 + }, + { + "epoch": 0.018197573656845753, + "grad_norm": 3.766607102383643, + "learning_rate": 1.8173076923076922e-06, + "loss": 0.6647, + "step": 441 + }, + { + "epoch": 0.01823883799620368, + "grad_norm": 29.586151095222007, + "learning_rate": 1.8214285714285714e-06, + "loss": 0.6482, + "step": 442 + }, + { + "epoch": 0.01828010233556161, + "grad_norm": 6.853168679528594, + "learning_rate": 1.8255494505494505e-06, + "loss": 0.7767, + "step": 443 + }, + { + "epoch": 0.018321366674919536, + "grad_norm": 4.276326560059131, + "learning_rate": 1.8296703296703299e-06, + "loss": 0.7544, + "step": 444 + }, + { + "epoch": 0.018362631014277462, + "grad_norm": 8.177178935224472, + "learning_rate": 1.833791208791209e-06, + "loss": 0.6667, + "step": 445 + }, + { + "epoch": 0.01840389535363539, + "grad_norm": 3.4030244225754585, + "learning_rate": 1.8379120879120882e-06, + "loss": 0.6408, + "step": 446 
+ }, + { + "epoch": 0.018445159692993315, + "grad_norm": 5.853766804180086, + "learning_rate": 1.842032967032967e-06, + "loss": 0.7, + "step": 447 + }, + { + "epoch": 0.01848642403235124, + "grad_norm": 3.279536800386592, + "learning_rate": 1.8461538461538462e-06, + "loss": 0.6585, + "step": 448 + }, + { + "epoch": 0.018527688371709167, + "grad_norm": 5.999382639738716, + "learning_rate": 1.8502747252747254e-06, + "loss": 0.6828, + "step": 449 + }, + { + "epoch": 0.018568952711067097, + "grad_norm": 3.940114072633091, + "learning_rate": 1.8543956043956045e-06, + "loss": 0.7168, + "step": 450 + }, + { + "epoch": 0.018610217050425024, + "grad_norm": 31.058939598990534, + "learning_rate": 1.8585164835164837e-06, + "loss": 0.6711, + "step": 451 + }, + { + "epoch": 0.01865148138978295, + "grad_norm": 3.343596594311348, + "learning_rate": 1.8626373626373628e-06, + "loss": 0.671, + "step": 452 + }, + { + "epoch": 0.018692745729140876, + "grad_norm": 4.742504005053895, + "learning_rate": 1.8667582417582418e-06, + "loss": 0.6281, + "step": 453 + }, + { + "epoch": 0.018734010068498803, + "grad_norm": 4.779855730726901, + "learning_rate": 1.870879120879121e-06, + "loss": 0.7015, + "step": 454 + }, + { + "epoch": 0.01877527440785673, + "grad_norm": 4.534605821217475, + "learning_rate": 1.875e-06, + "loss": 0.6379, + "step": 455 + }, + { + "epoch": 0.018816538747214655, + "grad_norm": 4.5369962330454525, + "learning_rate": 1.8791208791208792e-06, + "loss": 0.6538, + "step": 456 + }, + { + "epoch": 0.018857803086572585, + "grad_norm": 6.391805531733061, + "learning_rate": 1.8832417582417583e-06, + "loss": 0.6348, + "step": 457 + }, + { + "epoch": 0.01889906742593051, + "grad_norm": 4.278272609048062, + "learning_rate": 1.8873626373626373e-06, + "loss": 0.6249, + "step": 458 + }, + { + "epoch": 0.018940331765288438, + "grad_norm": 4.606178953245819, + "learning_rate": 1.8914835164835164e-06, + "loss": 0.6343, + "step": 459 + }, + { + "epoch": 0.018981596104646364, + "grad_norm": 5.293691509456402, + "learning_rate": 1.8956043956043956e-06, + "loss": 0.6258, + "step": 460 + }, + { + "epoch": 0.01902286044400429, + "grad_norm": 4.195165219338947, + "learning_rate": 1.8997252747252747e-06, + "loss": 0.6569, + "step": 461 + }, + { + "epoch": 0.019064124783362217, + "grad_norm": 3.8816282548743386, + "learning_rate": 1.9038461538461538e-06, + "loss": 0.6941, + "step": 462 + }, + { + "epoch": 0.019105389122720147, + "grad_norm": 8.63374759370109, + "learning_rate": 1.907967032967033e-06, + "loss": 0.5767, + "step": 463 + }, + { + "epoch": 0.019146653462078073, + "grad_norm": 7.044097835670315, + "learning_rate": 1.912087912087912e-06, + "loss": 0.6462, + "step": 464 + }, + { + "epoch": 0.019187917801436, + "grad_norm": 3.516571480782412, + "learning_rate": 1.9162087912087913e-06, + "loss": 0.6394, + "step": 465 + }, + { + "epoch": 0.019229182140793926, + "grad_norm": 8.417447062361003, + "learning_rate": 1.92032967032967e-06, + "loss": 0.6067, + "step": 466 + }, + { + "epoch": 0.019270446480151852, + "grad_norm": 3.0626835112656674, + "learning_rate": 1.9244505494505496e-06, + "loss": 0.6708, + "step": 467 + }, + { + "epoch": 0.01931171081950978, + "grad_norm": 29.273639980034776, + "learning_rate": 1.928571428571429e-06, + "loss": 0.7408, + "step": 468 + }, + { + "epoch": 0.019352975158867705, + "grad_norm": 3.737019354254259, + "learning_rate": 1.932692307692308e-06, + "loss": 0.6225, + "step": 469 + }, + { + "epoch": 0.019394239498225635, + "grad_norm": 9.870358421866415, + "learning_rate": 
1.9368131868131868e-06, + "loss": 0.5955, + "step": 470 + }, + { + "epoch": 0.01943550383758356, + "grad_norm": 3.541277978603329, + "learning_rate": 1.940934065934066e-06, + "loss": 0.6783, + "step": 471 + }, + { + "epoch": 0.019476768176941488, + "grad_norm": 4.571819514464689, + "learning_rate": 1.945054945054945e-06, + "loss": 0.6694, + "step": 472 + }, + { + "epoch": 0.019518032516299414, + "grad_norm": 5.9265364825761155, + "learning_rate": 1.9491758241758244e-06, + "loss": 0.7027, + "step": 473 + }, + { + "epoch": 0.01955929685565734, + "grad_norm": 14.19173354359185, + "learning_rate": 1.9532967032967034e-06, + "loss": 0.7076, + "step": 474 + }, + { + "epoch": 0.019600561195015267, + "grad_norm": 3.35322334797927, + "learning_rate": 1.9574175824175823e-06, + "loss": 0.6775, + "step": 475 + }, + { + "epoch": 0.019641825534373193, + "grad_norm": 4.2487689741100105, + "learning_rate": 1.9615384615384617e-06, + "loss": 0.6349, + "step": 476 + }, + { + "epoch": 0.019683089873731123, + "grad_norm": 6.204761244506814, + "learning_rate": 1.9656593406593406e-06, + "loss": 0.7049, + "step": 477 + }, + { + "epoch": 0.01972435421308905, + "grad_norm": 4.959536742216062, + "learning_rate": 1.96978021978022e-06, + "loss": 0.631, + "step": 478 + }, + { + "epoch": 0.019765618552446976, + "grad_norm": 4.887882075513262, + "learning_rate": 1.973901098901099e-06, + "loss": 0.6263, + "step": 479 + }, + { + "epoch": 0.019806882891804902, + "grad_norm": 4.0634945460509675, + "learning_rate": 1.9780219780219782e-06, + "loss": 0.6727, + "step": 480 + }, + { + "epoch": 0.01984814723116283, + "grad_norm": 3.5323100907589793, + "learning_rate": 1.982142857142857e-06, + "loss": 0.6349, + "step": 481 + }, + { + "epoch": 0.019889411570520755, + "grad_norm": 3.6601760651469775, + "learning_rate": 1.986263736263736e-06, + "loss": 0.6564, + "step": 482 + }, + { + "epoch": 0.01993067590987868, + "grad_norm": 9.36757168004897, + "learning_rate": 1.9903846153846155e-06, + "loss": 0.7017, + "step": 483 + }, + { + "epoch": 0.01997194024923661, + "grad_norm": 4.313062623475168, + "learning_rate": 1.9945054945054944e-06, + "loss": 0.7012, + "step": 484 + }, + { + "epoch": 0.020013204588594537, + "grad_norm": 4.88610196467759, + "learning_rate": 1.9986263736263737e-06, + "loss": 0.6444, + "step": 485 + }, + { + "epoch": 0.020054468927952464, + "grad_norm": 6.111015578172428, + "learning_rate": 2.0027472527472527e-06, + "loss": 0.6871, + "step": 486 + }, + { + "epoch": 0.02009573326731039, + "grad_norm": 9.457553086336764, + "learning_rate": 2.0068681318681316e-06, + "loss": 0.6317, + "step": 487 + }, + { + "epoch": 0.020136997606668317, + "grad_norm": 8.14390869748849, + "learning_rate": 2.010989010989011e-06, + "loss": 0.6456, + "step": 488 + }, + { + "epoch": 0.020178261946026243, + "grad_norm": 4.318439040318561, + "learning_rate": 2.01510989010989e-06, + "loss": 0.6651, + "step": 489 + }, + { + "epoch": 0.020219526285384173, + "grad_norm": 11.15240731453689, + "learning_rate": 2.0192307692307692e-06, + "loss": 0.6436, + "step": 490 + }, + { + "epoch": 0.0202607906247421, + "grad_norm": 3.848447170544111, + "learning_rate": 2.0233516483516486e-06, + "loss": 0.7337, + "step": 491 + }, + { + "epoch": 0.020302054964100025, + "grad_norm": 3.0096692717613034, + "learning_rate": 2.0274725274725275e-06, + "loss": 0.6334, + "step": 492 + }, + { + "epoch": 0.020343319303457952, + "grad_norm": 5.602170551106705, + "learning_rate": 2.031593406593407e-06, + "loss": 0.7602, + "step": 493 + }, + { + "epoch": 0.020384583642815878, + 
"grad_norm": 4.264632457752926, + "learning_rate": 2.035714285714286e-06, + "loss": 0.687, + "step": 494 + }, + { + "epoch": 0.020425847982173805, + "grad_norm": 3.9533095363284585, + "learning_rate": 2.0398351648351648e-06, + "loss": 0.6372, + "step": 495 + }, + { + "epoch": 0.02046711232153173, + "grad_norm": 3.7553667088377622, + "learning_rate": 2.043956043956044e-06, + "loss": 0.6606, + "step": 496 + }, + { + "epoch": 0.02050837666088966, + "grad_norm": 7.7719823445061875, + "learning_rate": 2.048076923076923e-06, + "loss": 0.6706, + "step": 497 + }, + { + "epoch": 0.020549641000247587, + "grad_norm": 4.915671570664961, + "learning_rate": 2.0521978021978024e-06, + "loss": 0.6622, + "step": 498 + }, + { + "epoch": 0.020590905339605513, + "grad_norm": 4.564577777064181, + "learning_rate": 2.0563186813186813e-06, + "loss": 0.6726, + "step": 499 + }, + { + "epoch": 0.02063216967896344, + "grad_norm": 4.067961345364734, + "learning_rate": 2.0604395604395607e-06, + "loss": 0.6616, + "step": 500 + }, + { + "epoch": 0.020673434018321366, + "grad_norm": 5.593268068501865, + "learning_rate": 2.0645604395604396e-06, + "loss": 0.6854, + "step": 501 + }, + { + "epoch": 0.020714698357679293, + "grad_norm": 2.7863767503671246, + "learning_rate": 2.0686813186813186e-06, + "loss": 0.6285, + "step": 502 + }, + { + "epoch": 0.02075596269703722, + "grad_norm": 4.361331087090155, + "learning_rate": 2.072802197802198e-06, + "loss": 0.6633, + "step": 503 + }, + { + "epoch": 0.02079722703639515, + "grad_norm": 7.623263737929246, + "learning_rate": 2.076923076923077e-06, + "loss": 0.6966, + "step": 504 + }, + { + "epoch": 0.020838491375753075, + "grad_norm": 5.540069090575869, + "learning_rate": 2.081043956043956e-06, + "loss": 0.6163, + "step": 505 + }, + { + "epoch": 0.020879755715111, + "grad_norm": 4.286343151355112, + "learning_rate": 2.085164835164835e-06, + "loss": 0.6696, + "step": 506 + }, + { + "epoch": 0.020921020054468928, + "grad_norm": 5.154928459169306, + "learning_rate": 2.089285714285714e-06, + "loss": 0.5969, + "step": 507 + }, + { + "epoch": 0.020962284393826854, + "grad_norm": 6.717229960328498, + "learning_rate": 2.0934065934065934e-06, + "loss": 0.6244, + "step": 508 + }, + { + "epoch": 0.02100354873318478, + "grad_norm": 5.23700432193627, + "learning_rate": 2.0975274725274724e-06, + "loss": 0.6975, + "step": 509 + }, + { + "epoch": 0.021044813072542707, + "grad_norm": 3.1570250995529627, + "learning_rate": 2.1016483516483517e-06, + "loss": 0.6739, + "step": 510 + }, + { + "epoch": 0.021086077411900637, + "grad_norm": 5.209183706791725, + "learning_rate": 2.1057692307692306e-06, + "loss": 0.6884, + "step": 511 + }, + { + "epoch": 0.021127341751258563, + "grad_norm": 39.151366462867365, + "learning_rate": 2.10989010989011e-06, + "loss": 0.7118, + "step": 512 + }, + { + "epoch": 0.02116860609061649, + "grad_norm": 8.142450612474573, + "learning_rate": 2.1140109890109894e-06, + "loss": 0.6238, + "step": 513 + }, + { + "epoch": 0.021209870429974416, + "grad_norm": 5.2038427058881656, + "learning_rate": 2.1181318681318683e-06, + "loss": 0.6401, + "step": 514 + }, + { + "epoch": 0.021251134769332342, + "grad_norm": 6.969790787582501, + "learning_rate": 2.1222527472527472e-06, + "loss": 0.6725, + "step": 515 + }, + { + "epoch": 0.02129239910869027, + "grad_norm": 3.7150923550734154, + "learning_rate": 2.1263736263736266e-06, + "loss": 0.6422, + "step": 516 + }, + { + "epoch": 0.0213336634480482, + "grad_norm": 6.0424092097814, + "learning_rate": 2.1304945054945055e-06, + "loss": 0.6857, + 
"step": 517 + }, + { + "epoch": 0.021374927787406125, + "grad_norm": 3.3599593000466434, + "learning_rate": 2.134615384615385e-06, + "loss": 0.6542, + "step": 518 + }, + { + "epoch": 0.02141619212676405, + "grad_norm": 4.952343134435551, + "learning_rate": 2.138736263736264e-06, + "loss": 0.6226, + "step": 519 + }, + { + "epoch": 0.021457456466121978, + "grad_norm": 7.925066970097956, + "learning_rate": 2.142857142857143e-06, + "loss": 0.6442, + "step": 520 + }, + { + "epoch": 0.021498720805479904, + "grad_norm": 3.785485315845301, + "learning_rate": 2.146978021978022e-06, + "loss": 0.6217, + "step": 521 + }, + { + "epoch": 0.02153998514483783, + "grad_norm": 5.9468179241907935, + "learning_rate": 2.151098901098901e-06, + "loss": 0.6664, + "step": 522 + }, + { + "epoch": 0.021581249484195757, + "grad_norm": 9.963791765583002, + "learning_rate": 2.1552197802197804e-06, + "loss": 0.6964, + "step": 523 + }, + { + "epoch": 0.021622513823553687, + "grad_norm": 3.8291834195657937, + "learning_rate": 2.1593406593406593e-06, + "loss": 0.6192, + "step": 524 + }, + { + "epoch": 0.021663778162911613, + "grad_norm": 4.068053776334074, + "learning_rate": 2.1634615384615387e-06, + "loss": 0.6847, + "step": 525 + }, + { + "epoch": 0.02170504250226954, + "grad_norm": 6.261378047566836, + "learning_rate": 2.1675824175824176e-06, + "loss": 0.6281, + "step": 526 + }, + { + "epoch": 0.021746306841627466, + "grad_norm": 4.657982956399255, + "learning_rate": 2.1717032967032965e-06, + "loss": 0.6866, + "step": 527 + }, + { + "epoch": 0.021787571180985392, + "grad_norm": 6.66313176751287, + "learning_rate": 2.175824175824176e-06, + "loss": 0.6841, + "step": 528 + }, + { + "epoch": 0.02182883552034332, + "grad_norm": 3.509261165535793, + "learning_rate": 2.179945054945055e-06, + "loss": 0.6447, + "step": 529 + }, + { + "epoch": 0.021870099859701245, + "grad_norm": 3.486301537144216, + "learning_rate": 2.184065934065934e-06, + "loss": 0.6158, + "step": 530 + }, + { + "epoch": 0.021911364199059175, + "grad_norm": 4.955877449193685, + "learning_rate": 2.188186813186813e-06, + "loss": 0.7131, + "step": 531 + }, + { + "epoch": 0.0219526285384171, + "grad_norm": 3.7299690216526016, + "learning_rate": 2.192307692307692e-06, + "loss": 0.6366, + "step": 532 + }, + { + "epoch": 0.021993892877775027, + "grad_norm": 5.588197840487826, + "learning_rate": 2.1964285714285714e-06, + "loss": 0.6438, + "step": 533 + }, + { + "epoch": 0.022035157217132954, + "grad_norm": 6.5768325493726065, + "learning_rate": 2.2005494505494503e-06, + "loss": 0.6368, + "step": 534 + }, + { + "epoch": 0.02207642155649088, + "grad_norm": 3.644801618293816, + "learning_rate": 2.2046703296703297e-06, + "loss": 0.644, + "step": 535 + }, + { + "epoch": 0.022117685895848806, + "grad_norm": 4.889602359481971, + "learning_rate": 2.208791208791209e-06, + "loss": 0.6348, + "step": 536 + }, + { + "epoch": 0.022158950235206733, + "grad_norm": 9.352992419189038, + "learning_rate": 2.212912087912088e-06, + "loss": 0.6417, + "step": 537 + }, + { + "epoch": 0.022200214574564663, + "grad_norm": 4.121248276164391, + "learning_rate": 2.2170329670329673e-06, + "loss": 0.6478, + "step": 538 + }, + { + "epoch": 0.02224147891392259, + "grad_norm": 4.121204676282453, + "learning_rate": 2.2211538461538463e-06, + "loss": 0.6927, + "step": 539 + }, + { + "epoch": 0.022282743253280515, + "grad_norm": 7.471972383135158, + "learning_rate": 2.2252747252747256e-06, + "loss": 0.651, + "step": 540 + }, + { + "epoch": 0.02232400759263844, + "grad_norm": 5.441977531550609, + 
"learning_rate": 2.2293956043956045e-06, + "loss": 0.7442, + "step": 541 + }, + { + "epoch": 0.022365271931996368, + "grad_norm": 3.900750414225993, + "learning_rate": 2.2335164835164835e-06, + "loss": 0.6905, + "step": 542 + }, + { + "epoch": 0.022406536271354294, + "grad_norm": 9.306575894942299, + "learning_rate": 2.237637362637363e-06, + "loss": 0.6648, + "step": 543 + }, + { + "epoch": 0.02244780061071222, + "grad_norm": 2.8934425032974, + "learning_rate": 2.2417582417582418e-06, + "loss": 0.6192, + "step": 544 + }, + { + "epoch": 0.02248906495007015, + "grad_norm": 7.242928393466268, + "learning_rate": 2.245879120879121e-06, + "loss": 0.5795, + "step": 545 + }, + { + "epoch": 0.022530329289428077, + "grad_norm": 2.811170298271752, + "learning_rate": 2.25e-06, + "loss": 0.7065, + "step": 546 + }, + { + "epoch": 0.022571593628786003, + "grad_norm": 6.939529823291988, + "learning_rate": 2.254120879120879e-06, + "loss": 0.6131, + "step": 547 + }, + { + "epoch": 0.02261285796814393, + "grad_norm": 3.7797691371685342, + "learning_rate": 2.2582417582417583e-06, + "loss": 0.6034, + "step": 548 + }, + { + "epoch": 0.022654122307501856, + "grad_norm": 3.1010946963186, + "learning_rate": 2.2623626373626373e-06, + "loss": 0.6234, + "step": 549 + }, + { + "epoch": 0.022695386646859782, + "grad_norm": 4.381747463143493, + "learning_rate": 2.2664835164835166e-06, + "loss": 0.6331, + "step": 550 + }, + { + "epoch": 0.022736650986217712, + "grad_norm": 5.635295565668601, + "learning_rate": 2.2706043956043956e-06, + "loss": 0.6238, + "step": 551 + }, + { + "epoch": 0.02277791532557564, + "grad_norm": 5.032799671161403, + "learning_rate": 2.2747252747252745e-06, + "loss": 0.6378, + "step": 552 + }, + { + "epoch": 0.022819179664933565, + "grad_norm": 4.8790473048448435, + "learning_rate": 2.278846153846154e-06, + "loss": 0.6282, + "step": 553 + }, + { + "epoch": 0.02286044400429149, + "grad_norm": 15.668238103842102, + "learning_rate": 2.2829670329670328e-06, + "loss": 0.6024, + "step": 554 + }, + { + "epoch": 0.022901708343649418, + "grad_norm": 3.6862489870871014, + "learning_rate": 2.287087912087912e-06, + "loss": 0.6679, + "step": 555 + }, + { + "epoch": 0.022942972683007344, + "grad_norm": 2.8772093953634967, + "learning_rate": 2.291208791208791e-06, + "loss": 0.6755, + "step": 556 + }, + { + "epoch": 0.02298423702236527, + "grad_norm": 24.089954175029792, + "learning_rate": 2.2953296703296704e-06, + "loss": 0.6348, + "step": 557 + }, + { + "epoch": 0.0230255013617232, + "grad_norm": 6.308369624884057, + "learning_rate": 2.2994505494505498e-06, + "loss": 0.6441, + "step": 558 + }, + { + "epoch": 0.023066765701081127, + "grad_norm": 3.119923149886929, + "learning_rate": 2.3035714285714287e-06, + "loss": 0.6151, + "step": 559 + }, + { + "epoch": 0.023108030040439053, + "grad_norm": 2.904730302105407, + "learning_rate": 2.307692307692308e-06, + "loss": 0.6702, + "step": 560 + }, + { + "epoch": 0.02314929437979698, + "grad_norm": 4.609701181006772, + "learning_rate": 2.311813186813187e-06, + "loss": 0.6178, + "step": 561 + }, + { + "epoch": 0.023190558719154906, + "grad_norm": 2.870296524258161, + "learning_rate": 2.315934065934066e-06, + "loss": 0.6055, + "step": 562 + }, + { + "epoch": 0.023231823058512832, + "grad_norm": 15.944859656620379, + "learning_rate": 2.3200549450549453e-06, + "loss": 0.6466, + "step": 563 + }, + { + "epoch": 0.02327308739787076, + "grad_norm": 4.327580726163873, + "learning_rate": 2.3241758241758242e-06, + "loss": 0.6189, + "step": 564 + }, + { + "epoch": 
0.02331435173722869, + "grad_norm": 6.545600661573251, + "learning_rate": 2.3282967032967036e-06, + "loss": 0.7466, + "step": 565 + }, + { + "epoch": 0.023355616076586615, + "grad_norm": 4.562188141632459, + "learning_rate": 2.3324175824175825e-06, + "loss": 0.7019, + "step": 566 + }, + { + "epoch": 0.02339688041594454, + "grad_norm": 8.051355004720937, + "learning_rate": 2.3365384615384615e-06, + "loss": 0.6425, + "step": 567 + }, + { + "epoch": 0.023438144755302467, + "grad_norm": 3.5702540089446364, + "learning_rate": 2.340659340659341e-06, + "loss": 0.6072, + "step": 568 + }, + { + "epoch": 0.023479409094660394, + "grad_norm": 3.1099962731061086, + "learning_rate": 2.3447802197802197e-06, + "loss": 0.6186, + "step": 569 + }, + { + "epoch": 0.02352067343401832, + "grad_norm": 7.084974129873869, + "learning_rate": 2.348901098901099e-06, + "loss": 0.6375, + "step": 570 + }, + { + "epoch": 0.023561937773376247, + "grad_norm": 6.588597011011588, + "learning_rate": 2.353021978021978e-06, + "loss": 0.6649, + "step": 571 + }, + { + "epoch": 0.023603202112734176, + "grad_norm": 19.314473379148694, + "learning_rate": 2.357142857142857e-06, + "loss": 0.6752, + "step": 572 + }, + { + "epoch": 0.023644466452092103, + "grad_norm": 9.837743465219832, + "learning_rate": 2.3612637362637363e-06, + "loss": 0.6085, + "step": 573 + }, + { + "epoch": 0.02368573079145003, + "grad_norm": 4.477719122944988, + "learning_rate": 2.3653846153846152e-06, + "loss": 0.6627, + "step": 574 + }, + { + "epoch": 0.023726995130807955, + "grad_norm": 2.787384239632268, + "learning_rate": 2.3695054945054946e-06, + "loss": 0.6441, + "step": 575 + }, + { + "epoch": 0.023768259470165882, + "grad_norm": 3.292419630674994, + "learning_rate": 2.3736263736263735e-06, + "loss": 0.6926, + "step": 576 + }, + { + "epoch": 0.023809523809523808, + "grad_norm": 2.926698858329206, + "learning_rate": 2.377747252747253e-06, + "loss": 0.6324, + "step": 577 + }, + { + "epoch": 0.023850788148881738, + "grad_norm": 5.118299568900494, + "learning_rate": 2.381868131868132e-06, + "loss": 0.6095, + "step": 578 + }, + { + "epoch": 0.023892052488239664, + "grad_norm": 2.8261048260829704, + "learning_rate": 2.3859890109890108e-06, + "loss": 0.617, + "step": 579 + }, + { + "epoch": 0.02393331682759759, + "grad_norm": 4.794784541155993, + "learning_rate": 2.39010989010989e-06, + "loss": 0.693, + "step": 580 + }, + { + "epoch": 0.023974581166955517, + "grad_norm": 14.990166275770964, + "learning_rate": 2.3942307692307695e-06, + "loss": 0.7112, + "step": 581 + }, + { + "epoch": 0.024015845506313444, + "grad_norm": 4.573428488232782, + "learning_rate": 2.3983516483516484e-06, + "loss": 0.6343, + "step": 582 + }, + { + "epoch": 0.02405710984567137, + "grad_norm": 4.576879288460086, + "learning_rate": 2.4024725274725278e-06, + "loss": 0.6057, + "step": 583 + }, + { + "epoch": 0.024098374185029296, + "grad_norm": 3.000980868431347, + "learning_rate": 2.4065934065934067e-06, + "loss": 0.6156, + "step": 584 + }, + { + "epoch": 0.024139638524387226, + "grad_norm": 3.471614897949898, + "learning_rate": 2.410714285714286e-06, + "loss": 0.6237, + "step": 585 + }, + { + "epoch": 0.024180902863745152, + "grad_norm": 10.67853103850109, + "learning_rate": 2.414835164835165e-06, + "loss": 0.6333, + "step": 586 + }, + { + "epoch": 0.02422216720310308, + "grad_norm": 4.162448149667273, + "learning_rate": 2.418956043956044e-06, + "loss": 0.6291, + "step": 587 + }, + { + "epoch": 0.024263431542461005, + "grad_norm": 4.590454161115066, + "learning_rate": 
2.4230769230769233e-06, + "loss": 0.6398, + "step": 588 + }, + { + "epoch": 0.02430469588181893, + "grad_norm": 2.5037132609459944, + "learning_rate": 2.427197802197802e-06, + "loss": 0.6363, + "step": 589 + }, + { + "epoch": 0.024345960221176858, + "grad_norm": 3.4954226593395066, + "learning_rate": 2.4313186813186816e-06, + "loss": 0.6478, + "step": 590 + }, + { + "epoch": 0.024387224560534784, + "grad_norm": 3.210084705411342, + "learning_rate": 2.4354395604395605e-06, + "loss": 0.6123, + "step": 591 + }, + { + "epoch": 0.024428488899892714, + "grad_norm": 13.81998901852301, + "learning_rate": 2.4395604395604394e-06, + "loss": 0.6058, + "step": 592 + }, + { + "epoch": 0.02446975323925064, + "grad_norm": 4.7490987028881895, + "learning_rate": 2.4436813186813188e-06, + "loss": 0.6312, + "step": 593 + }, + { + "epoch": 0.024511017578608567, + "grad_norm": 2.9571993829481387, + "learning_rate": 2.4478021978021977e-06, + "loss": 0.6003, + "step": 594 + }, + { + "epoch": 0.024552281917966493, + "grad_norm": 7.887539860495271, + "learning_rate": 2.451923076923077e-06, + "loss": 0.6365, + "step": 595 + }, + { + "epoch": 0.02459354625732442, + "grad_norm": 3.359672560475533, + "learning_rate": 2.456043956043956e-06, + "loss": 0.6016, + "step": 596 + }, + { + "epoch": 0.024634810596682346, + "grad_norm": 5.392508958761516, + "learning_rate": 2.4601648351648354e-06, + "loss": 0.6877, + "step": 597 + }, + { + "epoch": 0.024676074936040272, + "grad_norm": 2.807263246933224, + "learning_rate": 2.4642857142857143e-06, + "loss": 0.6469, + "step": 598 + }, + { + "epoch": 0.024717339275398202, + "grad_norm": 3.731118012127084, + "learning_rate": 2.4684065934065932e-06, + "loss": 0.6092, + "step": 599 + }, + { + "epoch": 0.02475860361475613, + "grad_norm": 3.192852042751125, + "learning_rate": 2.4725274725274726e-06, + "loss": 0.6733, + "step": 600 + }, + { + "epoch": 0.024799867954114055, + "grad_norm": 2.606372733738563, + "learning_rate": 2.4766483516483515e-06, + "loss": 0.5925, + "step": 601 + }, + { + "epoch": 0.02484113229347198, + "grad_norm": 3.50814325334133, + "learning_rate": 2.480769230769231e-06, + "loss": 0.6507, + "step": 602 + }, + { + "epoch": 0.024882396632829908, + "grad_norm": 6.812824146867519, + "learning_rate": 2.4848901098901102e-06, + "loss": 0.5811, + "step": 603 + }, + { + "epoch": 0.024923660972187834, + "grad_norm": 5.436222121949449, + "learning_rate": 2.489010989010989e-06, + "loss": 0.5928, + "step": 604 + }, + { + "epoch": 0.024964925311545764, + "grad_norm": 6.072914613306816, + "learning_rate": 2.4931318681318685e-06, + "loss": 0.646, + "step": 605 + }, + { + "epoch": 0.02500618965090369, + "grad_norm": 2.5312512595114334, + "learning_rate": 2.4972527472527474e-06, + "loss": 0.6256, + "step": 606 + }, + { + "epoch": 0.025047453990261617, + "grad_norm": 7.390444824074508, + "learning_rate": 2.5013736263736264e-06, + "loss": 0.673, + "step": 607 + }, + { + "epoch": 0.025088718329619543, + "grad_norm": 3.4838051413132805, + "learning_rate": 2.5054945054945057e-06, + "loss": 0.6848, + "step": 608 + }, + { + "epoch": 0.02512998266897747, + "grad_norm": 2.859588294616935, + "learning_rate": 2.5096153846153847e-06, + "loss": 0.6538, + "step": 609 + }, + { + "epoch": 0.025171247008335396, + "grad_norm": 5.167745526284166, + "learning_rate": 2.513736263736264e-06, + "loss": 0.6334, + "step": 610 + }, + { + "epoch": 0.025212511347693322, + "grad_norm": 3.151425645993797, + "learning_rate": 2.517857142857143e-06, + "loss": 0.6036, + "step": 611 + }, + { + "epoch": 
0.025253775687051252, + "grad_norm": 4.177960425286208, + "learning_rate": 2.521978021978022e-06, + "loss": 0.6764, + "step": 612 + }, + { + "epoch": 0.025295040026409178, + "grad_norm": 7.163200310740416, + "learning_rate": 2.5260989010989012e-06, + "loss": 0.6607, + "step": 613 + }, + { + "epoch": 0.025336304365767105, + "grad_norm": 6.038354114037331, + "learning_rate": 2.53021978021978e-06, + "loss": 0.6074, + "step": 614 + }, + { + "epoch": 0.02537756870512503, + "grad_norm": 3.7172755964089625, + "learning_rate": 2.5343406593406595e-06, + "loss": 0.6158, + "step": 615 + }, + { + "epoch": 0.025418833044482957, + "grad_norm": 4.422505168343981, + "learning_rate": 2.5384615384615385e-06, + "loss": 0.6838, + "step": 616 + }, + { + "epoch": 0.025460097383840884, + "grad_norm": 7.717227450158656, + "learning_rate": 2.542582417582418e-06, + "loss": 0.6704, + "step": 617 + }, + { + "epoch": 0.02550136172319881, + "grad_norm": 2.787911112815659, + "learning_rate": 2.5467032967032967e-06, + "loss": 0.656, + "step": 618 + }, + { + "epoch": 0.02554262606255674, + "grad_norm": 4.49796270450235, + "learning_rate": 2.5508241758241757e-06, + "loss": 0.607, + "step": 619 + }, + { + "epoch": 0.025583890401914666, + "grad_norm": 3.9839985149354074, + "learning_rate": 2.554945054945055e-06, + "loss": 0.6375, + "step": 620 + }, + { + "epoch": 0.025625154741272593, + "grad_norm": 2.3496578320791364, + "learning_rate": 2.559065934065934e-06, + "loss": 0.5812, + "step": 621 + }, + { + "epoch": 0.02566641908063052, + "grad_norm": 3.694319359065871, + "learning_rate": 2.5631868131868133e-06, + "loss": 0.613, + "step": 622 + }, + { + "epoch": 0.025707683419988445, + "grad_norm": 3.759490462502987, + "learning_rate": 2.5673076923076923e-06, + "loss": 0.5739, + "step": 623 + }, + { + "epoch": 0.02574894775934637, + "grad_norm": 11.120475165145054, + "learning_rate": 2.571428571428571e-06, + "loss": 0.6338, + "step": 624 + }, + { + "epoch": 0.025790212098704298, + "grad_norm": 2.4994475745846585, + "learning_rate": 2.5755494505494505e-06, + "loss": 0.7293, + "step": 625 + }, + { + "epoch": 0.025831476438062228, + "grad_norm": 10.062747804685108, + "learning_rate": 2.57967032967033e-06, + "loss": 0.6415, + "step": 626 + }, + { + "epoch": 0.025872740777420154, + "grad_norm": 2.753106208735615, + "learning_rate": 2.583791208791209e-06, + "loss": 0.6241, + "step": 627 + }, + { + "epoch": 0.02591400511677808, + "grad_norm": 4.249551441763036, + "learning_rate": 2.587912087912088e-06, + "loss": 0.6226, + "step": 628 + }, + { + "epoch": 0.025955269456136007, + "grad_norm": 4.081000668710316, + "learning_rate": 2.592032967032967e-06, + "loss": 0.64, + "step": 629 + }, + { + "epoch": 0.025996533795493933, + "grad_norm": 8.79960471792203, + "learning_rate": 2.5961538461538465e-06, + "loss": 0.6247, + "step": 630 + }, + { + "epoch": 0.02603779813485186, + "grad_norm": 4.495053881107029, + "learning_rate": 2.6002747252747254e-06, + "loss": 0.6017, + "step": 631 + }, + { + "epoch": 0.02607906247420979, + "grad_norm": 5.297107755904907, + "learning_rate": 2.6043956043956043e-06, + "loss": 0.6862, + "step": 632 + }, + { + "epoch": 0.026120326813567716, + "grad_norm": 30.57530756081229, + "learning_rate": 2.6085164835164837e-06, + "loss": 0.6975, + "step": 633 + }, + { + "epoch": 0.026161591152925642, + "grad_norm": 4.191156004164431, + "learning_rate": 2.6126373626373626e-06, + "loss": 0.6179, + "step": 634 + }, + { + "epoch": 0.02620285549228357, + "grad_norm": 3.4068855458941174, + "learning_rate": 2.616758241758242e-06, + 
"loss": 0.6018, + "step": 635 + }, + { + "epoch": 0.026244119831641495, + "grad_norm": 3.9116455951534563, + "learning_rate": 2.620879120879121e-06, + "loss": 0.6274, + "step": 636 + }, + { + "epoch": 0.02628538417099942, + "grad_norm": 6.789606689325224, + "learning_rate": 2.6250000000000003e-06, + "loss": 0.6376, + "step": 637 + }, + { + "epoch": 0.026326648510357348, + "grad_norm": 2.9743600483902277, + "learning_rate": 2.629120879120879e-06, + "loss": 0.6024, + "step": 638 + }, + { + "epoch": 0.026367912849715278, + "grad_norm": 5.860598087523523, + "learning_rate": 2.633241758241758e-06, + "loss": 0.6547, + "step": 639 + }, + { + "epoch": 0.026409177189073204, + "grad_norm": 3.881011404901765, + "learning_rate": 2.6373626373626375e-06, + "loss": 0.6776, + "step": 640 + }, + { + "epoch": 0.02645044152843113, + "grad_norm": 3.1939983113145325, + "learning_rate": 2.6414835164835164e-06, + "loss": 0.5734, + "step": 641 + }, + { + "epoch": 0.026491705867789057, + "grad_norm": 19.757473176194676, + "learning_rate": 2.6456043956043958e-06, + "loss": 0.647, + "step": 642 + }, + { + "epoch": 0.026532970207146983, + "grad_norm": 8.830905676960336, + "learning_rate": 2.6497252747252747e-06, + "loss": 0.6187, + "step": 643 + }, + { + "epoch": 0.02657423454650491, + "grad_norm": 7.134808707870201, + "learning_rate": 2.6538461538461537e-06, + "loss": 0.6475, + "step": 644 + }, + { + "epoch": 0.026615498885862836, + "grad_norm": 5.2626410512508635, + "learning_rate": 2.657967032967033e-06, + "loss": 0.6823, + "step": 645 + }, + { + "epoch": 0.026656763225220766, + "grad_norm": 6.642577947617221, + "learning_rate": 2.662087912087912e-06, + "loss": 0.6393, + "step": 646 + }, + { + "epoch": 0.026698027564578692, + "grad_norm": 4.876246228856213, + "learning_rate": 2.6662087912087913e-06, + "loss": 0.6327, + "step": 647 + }, + { + "epoch": 0.02673929190393662, + "grad_norm": 3.453617643590183, + "learning_rate": 2.6703296703296702e-06, + "loss": 0.596, + "step": 648 + }, + { + "epoch": 0.026780556243294545, + "grad_norm": 4.262566360606932, + "learning_rate": 2.6744505494505496e-06, + "loss": 0.6462, + "step": 649 + }, + { + "epoch": 0.02682182058265247, + "grad_norm": 2.9410180096649072, + "learning_rate": 2.678571428571429e-06, + "loss": 0.5893, + "step": 650 + }, + { + "epoch": 0.026863084922010397, + "grad_norm": 2.6591084385889703, + "learning_rate": 2.682692307692308e-06, + "loss": 0.6489, + "step": 651 + }, + { + "epoch": 0.026904349261368324, + "grad_norm": 3.990064550595433, + "learning_rate": 2.686813186813187e-06, + "loss": 0.6972, + "step": 652 + }, + { + "epoch": 0.026945613600726254, + "grad_norm": 2.6107090299135463, + "learning_rate": 2.690934065934066e-06, + "loss": 0.6581, + "step": 653 + }, + { + "epoch": 0.02698687794008418, + "grad_norm": 2.7478114061427794, + "learning_rate": 2.695054945054945e-06, + "loss": 0.5785, + "step": 654 + }, + { + "epoch": 0.027028142279442106, + "grad_norm": 5.237215234826112, + "learning_rate": 2.6991758241758245e-06, + "loss": 0.6715, + "step": 655 + }, + { + "epoch": 0.027069406618800033, + "grad_norm": 2.5675077176384193, + "learning_rate": 2.7032967032967034e-06, + "loss": 0.6606, + "step": 656 + }, + { + "epoch": 0.02711067095815796, + "grad_norm": 2.364241988896969, + "learning_rate": 2.7074175824175823e-06, + "loss": 0.5944, + "step": 657 + }, + { + "epoch": 0.027151935297515885, + "grad_norm": 10.218501720678267, + "learning_rate": 2.7115384615384617e-06, + "loss": 0.6738, + "step": 658 + }, + { + "epoch": 0.027193199636873815, + "grad_norm": 
80.12802680929148, + "learning_rate": 2.7156593406593406e-06, + "loss": 0.5613, + "step": 659 + }, + { + "epoch": 0.02723446397623174, + "grad_norm": 3.199613423172436, + "learning_rate": 2.71978021978022e-06, + "loss": 0.6709, + "step": 660 + }, + { + "epoch": 0.027275728315589668, + "grad_norm": 3.7421051800813756, + "learning_rate": 2.723901098901099e-06, + "loss": 0.6927, + "step": 661 + }, + { + "epoch": 0.027316992654947594, + "grad_norm": 2.043363778856883, + "learning_rate": 2.7280219780219782e-06, + "loss": 0.6243, + "step": 662 + }, + { + "epoch": 0.02735825699430552, + "grad_norm": 4.523874014490057, + "learning_rate": 2.732142857142857e-06, + "loss": 0.6187, + "step": 663 + }, + { + "epoch": 0.027399521333663447, + "grad_norm": 6.2744995782596185, + "learning_rate": 2.736263736263736e-06, + "loss": 0.6369, + "step": 664 + }, + { + "epoch": 0.027440785673021374, + "grad_norm": 3.0579479846636772, + "learning_rate": 2.7403846153846155e-06, + "loss": 0.6502, + "step": 665 + }, + { + "epoch": 0.027482050012379303, + "grad_norm": 7.391316266729372, + "learning_rate": 2.7445054945054944e-06, + "loss": 0.6484, + "step": 666 + }, + { + "epoch": 0.02752331435173723, + "grad_norm": 21.79504377907907, + "learning_rate": 2.7486263736263738e-06, + "loss": 0.5702, + "step": 667 + }, + { + "epoch": 0.027564578691095156, + "grad_norm": 2.1450892616173425, + "learning_rate": 2.7527472527472527e-06, + "loss": 0.5962, + "step": 668 + }, + { + "epoch": 0.027605843030453082, + "grad_norm": 2.926128251948536, + "learning_rate": 2.7568681318681316e-06, + "loss": 0.665, + "step": 669 + }, + { + "epoch": 0.02764710736981101, + "grad_norm": 3.5740966168489012, + "learning_rate": 2.760989010989011e-06, + "loss": 0.6565, + "step": 670 + }, + { + "epoch": 0.027688371709168935, + "grad_norm": 3.2372802274257486, + "learning_rate": 2.76510989010989e-06, + "loss": 0.6688, + "step": 671 + }, + { + "epoch": 0.02772963604852686, + "grad_norm": 3.4906700610469583, + "learning_rate": 2.7692307692307693e-06, + "loss": 0.6139, + "step": 672 + }, + { + "epoch": 0.02777090038788479, + "grad_norm": 18.06927901494635, + "learning_rate": 2.7733516483516486e-06, + "loss": 0.6709, + "step": 673 + }, + { + "epoch": 0.027812164727242718, + "grad_norm": 10.153652870960778, + "learning_rate": 2.7774725274725276e-06, + "loss": 0.6809, + "step": 674 + }, + { + "epoch": 0.027853429066600644, + "grad_norm": 3.177528184926765, + "learning_rate": 2.781593406593407e-06, + "loss": 0.6242, + "step": 675 + }, + { + "epoch": 0.02789469340595857, + "grad_norm": 6.922674738653239, + "learning_rate": 2.785714285714286e-06, + "loss": 0.6023, + "step": 676 + }, + { + "epoch": 0.027935957745316497, + "grad_norm": 3.0392734966227124, + "learning_rate": 2.7898351648351648e-06, + "loss": 0.6122, + "step": 677 + }, + { + "epoch": 0.027977222084674423, + "grad_norm": 3.800447175322242, + "learning_rate": 2.793956043956044e-06, + "loss": 0.6363, + "step": 678 + }, + { + "epoch": 0.02801848642403235, + "grad_norm": 2.9407220525087836, + "learning_rate": 2.798076923076923e-06, + "loss": 0.6673, + "step": 679 + }, + { + "epoch": 0.02805975076339028, + "grad_norm": 7.358106918970251, + "learning_rate": 2.8021978021978024e-06, + "loss": 0.698, + "step": 680 + }, + { + "epoch": 0.028101015102748206, + "grad_norm": 3.6328793293575985, + "learning_rate": 2.8063186813186814e-06, + "loss": 0.5696, + "step": 681 + }, + { + "epoch": 0.028142279442106132, + "grad_norm": 3.406799201415885, + "learning_rate": 2.8104395604395607e-06, + "loss": 0.6068, + "step": 682 
+ }, + { + "epoch": 0.02818354378146406, + "grad_norm": 2.6667415284136564, + "learning_rate": 2.8145604395604396e-06, + "loss": 0.5752, + "step": 683 + }, + { + "epoch": 0.028224808120821985, + "grad_norm": 20.845632745974836, + "learning_rate": 2.8186813186813186e-06, + "loss": 0.657, + "step": 684 + }, + { + "epoch": 0.02826607246017991, + "grad_norm": 7.184582184882199, + "learning_rate": 2.822802197802198e-06, + "loss": 0.6182, + "step": 685 + }, + { + "epoch": 0.02830733679953784, + "grad_norm": 3.4225886525504023, + "learning_rate": 2.826923076923077e-06, + "loss": 0.6218, + "step": 686 + }, + { + "epoch": 0.028348601138895767, + "grad_norm": 3.3326409517451157, + "learning_rate": 2.8310439560439562e-06, + "loss": 0.6311, + "step": 687 + }, + { + "epoch": 0.028389865478253694, + "grad_norm": 4.522822043331587, + "learning_rate": 2.835164835164835e-06, + "loss": 0.5676, + "step": 688 + }, + { + "epoch": 0.02843112981761162, + "grad_norm": 3.5253845628471234, + "learning_rate": 2.839285714285714e-06, + "loss": 0.6818, + "step": 689 + }, + { + "epoch": 0.028472394156969547, + "grad_norm": 10.925379095845432, + "learning_rate": 2.8434065934065934e-06, + "loss": 0.5985, + "step": 690 + }, + { + "epoch": 0.028513658496327473, + "grad_norm": 4.20071117538536, + "learning_rate": 2.8475274725274724e-06, + "loss": 0.6532, + "step": 691 + }, + { + "epoch": 0.0285549228356854, + "grad_norm": 10.247773087932353, + "learning_rate": 2.8516483516483517e-06, + "loss": 0.6509, + "step": 692 + }, + { + "epoch": 0.02859618717504333, + "grad_norm": 4.975448252269309, + "learning_rate": 2.8557692307692307e-06, + "loss": 0.6767, + "step": 693 + }, + { + "epoch": 0.028637451514401255, + "grad_norm": 3.9875658193866546, + "learning_rate": 2.85989010989011e-06, + "loss": 0.6214, + "step": 694 + }, + { + "epoch": 0.028678715853759182, + "grad_norm": 9.35271261023904, + "learning_rate": 2.8640109890109894e-06, + "loss": 0.6877, + "step": 695 + }, + { + "epoch": 0.028719980193117108, + "grad_norm": 2.7632738318057513, + "learning_rate": 2.8681318681318683e-06, + "loss": 0.5991, + "step": 696 + }, + { + "epoch": 0.028761244532475035, + "grad_norm": 25.144707514274206, + "learning_rate": 2.8722527472527472e-06, + "loss": 0.6092, + "step": 697 + }, + { + "epoch": 0.02880250887183296, + "grad_norm": 3.1786568853134987, + "learning_rate": 2.8763736263736266e-06, + "loss": 0.6307, + "step": 698 + }, + { + "epoch": 0.028843773211190887, + "grad_norm": 3.027555707875884, + "learning_rate": 2.8804945054945055e-06, + "loss": 0.5932, + "step": 699 + }, + { + "epoch": 0.028885037550548817, + "grad_norm": 3.5796706317187654, + "learning_rate": 2.884615384615385e-06, + "loss": 0.5735, + "step": 700 + }, + { + "epoch": 0.028926301889906744, + "grad_norm": 8.328063150241954, + "learning_rate": 2.888736263736264e-06, + "loss": 0.6296, + "step": 701 + }, + { + "epoch": 0.02896756622926467, + "grad_norm": 6.7881075170082354, + "learning_rate": 2.892857142857143e-06, + "loss": 0.6078, + "step": 702 + }, + { + "epoch": 0.029008830568622596, + "grad_norm": 6.021361134750593, + "learning_rate": 2.896978021978022e-06, + "loss": 0.6352, + "step": 703 + }, + { + "epoch": 0.029050094907980523, + "grad_norm": 2.7670914477187623, + "learning_rate": 2.901098901098901e-06, + "loss": 0.5939, + "step": 704 + }, + { + "epoch": 0.02909135924733845, + "grad_norm": 6.452865214008953, + "learning_rate": 2.9052197802197804e-06, + "loss": 0.5769, + "step": 705 + }, + { + "epoch": 0.029132623586696375, + "grad_norm": 3.203640423242981, + 
"learning_rate": 2.9093406593406593e-06, + "loss": 0.6474, + "step": 706 + }, + { + "epoch": 0.029173887926054305, + "grad_norm": 3.9343633270673153, + "learning_rate": 2.9134615384615387e-06, + "loss": 0.5887, + "step": 707 + }, + { + "epoch": 0.02921515226541223, + "grad_norm": 3.4892107379022987, + "learning_rate": 2.9175824175824176e-06, + "loss": 0.6325, + "step": 708 + }, + { + "epoch": 0.029256416604770158, + "grad_norm": 11.603734460537401, + "learning_rate": 2.9217032967032965e-06, + "loss": 0.5879, + "step": 709 + }, + { + "epoch": 0.029297680944128084, + "grad_norm": 8.740093146232862, + "learning_rate": 2.925824175824176e-06, + "loss": 0.6335, + "step": 710 + }, + { + "epoch": 0.02933894528348601, + "grad_norm": 3.691447924376669, + "learning_rate": 2.929945054945055e-06, + "loss": 0.5842, + "step": 711 + }, + { + "epoch": 0.029380209622843937, + "grad_norm": 3.6811491339622715, + "learning_rate": 2.934065934065934e-06, + "loss": 0.6646, + "step": 712 + }, + { + "epoch": 0.029421473962201867, + "grad_norm": 4.189622926623597, + "learning_rate": 2.938186813186813e-06, + "loss": 0.643, + "step": 713 + }, + { + "epoch": 0.029462738301559793, + "grad_norm": 3.9178670404333586, + "learning_rate": 2.942307692307692e-06, + "loss": 0.7021, + "step": 714 + }, + { + "epoch": 0.02950400264091772, + "grad_norm": 3.166393145144737, + "learning_rate": 2.9464285714285714e-06, + "loss": 0.632, + "step": 715 + }, + { + "epoch": 0.029545266980275646, + "grad_norm": 3.4430043999494337, + "learning_rate": 2.9505494505494503e-06, + "loss": 0.5933, + "step": 716 + }, + { + "epoch": 0.029586531319633572, + "grad_norm": 9.70023918905039, + "learning_rate": 2.9546703296703297e-06, + "loss": 0.6329, + "step": 717 + }, + { + "epoch": 0.0296277956589915, + "grad_norm": 3.3514783981227816, + "learning_rate": 2.958791208791209e-06, + "loss": 0.633, + "step": 718 + }, + { + "epoch": 0.029669059998349425, + "grad_norm": 8.644637553287549, + "learning_rate": 2.962912087912088e-06, + "loss": 0.5915, + "step": 719 + }, + { + "epoch": 0.029710324337707355, + "grad_norm": 3.072012795205112, + "learning_rate": 2.9670329670329673e-06, + "loss": 0.6753, + "step": 720 + }, + { + "epoch": 0.02975158867706528, + "grad_norm": 3.165012051608714, + "learning_rate": 2.9711538461538463e-06, + "loss": 0.6186, + "step": 721 + }, + { + "epoch": 0.029792853016423208, + "grad_norm": 6.04830789844313, + "learning_rate": 2.9752747252747256e-06, + "loss": 0.5781, + "step": 722 + }, + { + "epoch": 0.029834117355781134, + "grad_norm": 5.533269424425352, + "learning_rate": 2.9793956043956046e-06, + "loss": 0.6364, + "step": 723 + }, + { + "epoch": 0.02987538169513906, + "grad_norm": 2.4849919191162995, + "learning_rate": 2.9835164835164835e-06, + "loss": 0.6515, + "step": 724 + }, + { + "epoch": 0.029916646034496987, + "grad_norm": 3.352336186477291, + "learning_rate": 2.987637362637363e-06, + "loss": 0.6098, + "step": 725 + }, + { + "epoch": 0.029957910373854913, + "grad_norm": 3.2166400897172203, + "learning_rate": 2.9917582417582418e-06, + "loss": 0.5838, + "step": 726 + }, + { + "epoch": 0.029999174713212843, + "grad_norm": 4.626441017595814, + "learning_rate": 2.995879120879121e-06, + "loss": 0.6526, + "step": 727 + }, + { + "epoch": 0.03004043905257077, + "grad_norm": 3.6328588652098115, + "learning_rate": 3e-06, + "loss": 0.6305, + "step": 728 + }, + { + "epoch": 0.030081703391928696, + "grad_norm": 3.822362516198517, + "learning_rate": 2.9999999866031237e-06, + "loss": 0.6411, + "step": 729 + }, + { + "epoch": 
0.030122967731286622, + "grad_norm": 5.510970165019003, + "learning_rate": 2.999999946412496e-06, + "loss": 0.6401, + "step": 730 + }, + { + "epoch": 0.03016423207064455, + "grad_norm": 3.3960856783747793, + "learning_rate": 2.9999998794281156e-06, + "loss": 0.6597, + "step": 731 + }, + { + "epoch": 0.030205496410002475, + "grad_norm": 3.78379801773496, + "learning_rate": 2.9999997856499856e-06, + "loss": 0.5947, + "step": 732 + }, + { + "epoch": 0.0302467607493604, + "grad_norm": 2.718403404793385, + "learning_rate": 2.9999996650781066e-06, + "loss": 0.5951, + "step": 733 + }, + { + "epoch": 0.03028802508871833, + "grad_norm": 7.471082035301906, + "learning_rate": 2.9999995177124817e-06, + "loss": 0.5572, + "step": 734 + }, + { + "epoch": 0.030329289428076257, + "grad_norm": 4.715876943462599, + "learning_rate": 2.9999993435531124e-06, + "loss": 0.6239, + "step": 735 + }, + { + "epoch": 0.030370553767434184, + "grad_norm": 4.997879834433211, + "learning_rate": 2.999999142600003e-06, + "loss": 0.6346, + "step": 736 + }, + { + "epoch": 0.03041181810679211, + "grad_norm": 4.193536411820203, + "learning_rate": 2.999998914853156e-06, + "loss": 0.6131, + "step": 737 + }, + { + "epoch": 0.030453082446150036, + "grad_norm": 3.4788175563382615, + "learning_rate": 2.999998660312576e-06, + "loss": 0.6838, + "step": 738 + }, + { + "epoch": 0.030494346785507963, + "grad_norm": 7.243628054760976, + "learning_rate": 2.999998378978268e-06, + "loss": 0.6216, + "step": 739 + }, + { + "epoch": 0.030535611124865893, + "grad_norm": 7.5903158398626, + "learning_rate": 2.999998070850236e-06, + "loss": 0.6584, + "step": 740 + }, + { + "epoch": 0.03057687546422382, + "grad_norm": 6.8056410467124255, + "learning_rate": 2.9999977359284866e-06, + "loss": 0.6798, + "step": 741 + }, + { + "epoch": 0.030618139803581745, + "grad_norm": 10.959859680734045, + "learning_rate": 2.9999973742130246e-06, + "loss": 0.5902, + "step": 742 + }, + { + "epoch": 0.03065940414293967, + "grad_norm": 2.633770850862072, + "learning_rate": 2.9999969857038573e-06, + "loss": 0.5949, + "step": 743 + }, + { + "epoch": 0.030700668482297598, + "grad_norm": 4.621009340787699, + "learning_rate": 2.9999965704009915e-06, + "loss": 0.5845, + "step": 744 + }, + { + "epoch": 0.030741932821655524, + "grad_norm": 3.307939274075022, + "learning_rate": 2.9999961283044347e-06, + "loss": 0.6643, + "step": 745 + }, + { + "epoch": 0.03078319716101345, + "grad_norm": 3.7048478205776956, + "learning_rate": 2.999995659414194e-06, + "loss": 0.6118, + "step": 746 + }, + { + "epoch": 0.03082446150037138, + "grad_norm": 4.590429402969553, + "learning_rate": 2.9999951637302788e-06, + "loss": 0.556, + "step": 747 + }, + { + "epoch": 0.030865725839729307, + "grad_norm": 3.166087426442832, + "learning_rate": 2.9999946412526975e-06, + "loss": 0.6783, + "step": 748 + }, + { + "epoch": 0.030906990179087233, + "grad_norm": 4.784399588213716, + "learning_rate": 2.9999940919814595e-06, + "loss": 0.61, + "step": 749 + }, + { + "epoch": 0.03094825451844516, + "grad_norm": 4.339315134460071, + "learning_rate": 2.999993515916575e-06, + "loss": 0.6207, + "step": 750 + }, + { + "epoch": 0.030989518857803086, + "grad_norm": 4.260797343027669, + "learning_rate": 2.9999929130580536e-06, + "loss": 0.6072, + "step": 751 + }, + { + "epoch": 0.031030783197161012, + "grad_norm": 6.934376919363838, + "learning_rate": 2.9999922834059064e-06, + "loss": 0.5573, + "step": 752 + }, + { + "epoch": 0.03107204753651894, + "grad_norm": 9.239485667655787, + "learning_rate": 2.9999916269601444e-06, + 
"loss": 0.6496, + "step": 753 + }, + { + "epoch": 0.03111331187587687, + "grad_norm": 4.008198584687521, + "learning_rate": 2.9999909437207797e-06, + "loss": 0.6648, + "step": 754 + }, + { + "epoch": 0.031154576215234795, + "grad_norm": 3.109817181306898, + "learning_rate": 2.9999902336878243e-06, + "loss": 0.5981, + "step": 755 + }, + { + "epoch": 0.03119584055459272, + "grad_norm": 4.415780627878862, + "learning_rate": 2.999989496861291e-06, + "loss": 0.5939, + "step": 756 + }, + { + "epoch": 0.031237104893950648, + "grad_norm": 3.2279696332358165, + "learning_rate": 2.999988733241193e-06, + "loss": 0.6548, + "step": 757 + }, + { + "epoch": 0.031278369233308574, + "grad_norm": 6.450141454416855, + "learning_rate": 2.9999879428275437e-06, + "loss": 0.5737, + "step": 758 + }, + { + "epoch": 0.031319633572666504, + "grad_norm": 5.5523699004327804, + "learning_rate": 2.999987125620357e-06, + "loss": 0.5977, + "step": 759 + }, + { + "epoch": 0.03136089791202443, + "grad_norm": 3.5950909407341625, + "learning_rate": 2.9999862816196484e-06, + "loss": 0.5816, + "step": 760 + }, + { + "epoch": 0.03140216225138236, + "grad_norm": 3.1193076091518708, + "learning_rate": 2.9999854108254322e-06, + "loss": 0.6121, + "step": 761 + }, + { + "epoch": 0.03144342659074028, + "grad_norm": 2.790126694527143, + "learning_rate": 2.999984513237724e-06, + "loss": 0.6227, + "step": 762 + }, + { + "epoch": 0.03148469093009821, + "grad_norm": 8.360376774539304, + "learning_rate": 2.9999835888565407e-06, + "loss": 0.6024, + "step": 763 + }, + { + "epoch": 0.03152595526945614, + "grad_norm": 8.829615768327363, + "learning_rate": 2.999982637681897e-06, + "loss": 0.6184, + "step": 764 + }, + { + "epoch": 0.03156721960881406, + "grad_norm": 3.4960818648695513, + "learning_rate": 2.9999816597138117e-06, + "loss": 0.6392, + "step": 765 + }, + { + "epoch": 0.03160848394817199, + "grad_norm": 3.1592705110633945, + "learning_rate": 2.9999806549523018e-06, + "loss": 0.5977, + "step": 766 + }, + { + "epoch": 0.031649748287529915, + "grad_norm": 4.5890318071869505, + "learning_rate": 2.9999796233973846e-06, + "loss": 0.6188, + "step": 767 + }, + { + "epoch": 0.031691012626887845, + "grad_norm": 6.8024094870574725, + "learning_rate": 2.9999785650490793e-06, + "loss": 0.6257, + "step": 768 + }, + { + "epoch": 0.03173227696624577, + "grad_norm": 4.0019530648680215, + "learning_rate": 2.999977479907404e-06, + "loss": 0.6262, + "step": 769 + }, + { + "epoch": 0.0317735413056037, + "grad_norm": 3.806399335266661, + "learning_rate": 2.9999763679723786e-06, + "loss": 0.5797, + "step": 770 + }, + { + "epoch": 0.03181480564496163, + "grad_norm": 3.615233400647248, + "learning_rate": 2.999975229244023e-06, + "loss": 0.5665, + "step": 771 + }, + { + "epoch": 0.03185606998431955, + "grad_norm": 3.868585224039425, + "learning_rate": 2.9999740637223574e-06, + "loss": 0.5792, + "step": 772 + }, + { + "epoch": 0.03189733432367748, + "grad_norm": 2.5074695936511495, + "learning_rate": 2.999972871407403e-06, + "loss": 0.6324, + "step": 773 + }, + { + "epoch": 0.0319385986630354, + "grad_norm": 2.7896391815978534, + "learning_rate": 2.9999716522991807e-06, + "loss": 0.6409, + "step": 774 + }, + { + "epoch": 0.03197986300239333, + "grad_norm": 10.003989639665551, + "learning_rate": 2.999970406397712e-06, + "loss": 0.6072, + "step": 775 + }, + { + "epoch": 0.032021127341751256, + "grad_norm": 3.953660220806217, + "learning_rate": 2.99996913370302e-06, + "loss": 0.6386, + "step": 776 + }, + { + "epoch": 0.032062391681109186, + "grad_norm": 
4.18299802959694, + "learning_rate": 2.999967834215126e-06, + "loss": 0.6277, + "step": 777 + }, + { + "epoch": 0.032103656020467115, + "grad_norm": 6.541563059195628, + "learning_rate": 2.999966507934055e-06, + "loss": 0.6264, + "step": 778 + }, + { + "epoch": 0.03214492035982504, + "grad_norm": 3.890362168154781, + "learning_rate": 2.9999651548598296e-06, + "loss": 0.5845, + "step": 779 + }, + { + "epoch": 0.03218618469918297, + "grad_norm": 3.886421424523183, + "learning_rate": 2.9999637749924738e-06, + "loss": 0.6673, + "step": 780 + }, + { + "epoch": 0.03222744903854089, + "grad_norm": 3.6425682782674067, + "learning_rate": 2.999962368332013e-06, + "loss": 0.549, + "step": 781 + }, + { + "epoch": 0.03226871337789882, + "grad_norm": 2.949603143277193, + "learning_rate": 2.999960934878472e-06, + "loss": 0.6848, + "step": 782 + }, + { + "epoch": 0.032309977717256744, + "grad_norm": 3.618446122363329, + "learning_rate": 2.9999594746318762e-06, + "loss": 0.605, + "step": 783 + }, + { + "epoch": 0.032351242056614674, + "grad_norm": 15.860480813293105, + "learning_rate": 2.999957987592252e-06, + "loss": 0.6868, + "step": 784 + }, + { + "epoch": 0.0323925063959726, + "grad_norm": 7.310246874320084, + "learning_rate": 2.999956473759626e-06, + "loss": 0.6641, + "step": 785 + }, + { + "epoch": 0.032433770735330526, + "grad_norm": 2.952639031208254, + "learning_rate": 2.9999549331340247e-06, + "loss": 0.6461, + "step": 786 + }, + { + "epoch": 0.032475035074688456, + "grad_norm": 6.546967180174951, + "learning_rate": 2.9999533657154762e-06, + "loss": 0.6412, + "step": 787 + }, + { + "epoch": 0.03251629941404638, + "grad_norm": 3.353184551630891, + "learning_rate": 2.999951771504008e-06, + "loss": 0.6133, + "step": 788 + }, + { + "epoch": 0.03255756375340431, + "grad_norm": 3.4674901998034677, + "learning_rate": 2.9999501504996494e-06, + "loss": 0.6805, + "step": 789 + }, + { + "epoch": 0.03259882809276223, + "grad_norm": 5.812788575648212, + "learning_rate": 2.9999485027024286e-06, + "loss": 0.6399, + "step": 790 + }, + { + "epoch": 0.03264009243212016, + "grad_norm": 4.122779950716876, + "learning_rate": 2.999946828112375e-06, + "loss": 0.5864, + "step": 791 + }, + { + "epoch": 0.03268135677147809, + "grad_norm": 3.0450356273365395, + "learning_rate": 2.9999451267295184e-06, + "loss": 0.6244, + "step": 792 + }, + { + "epoch": 0.032722621110836014, + "grad_norm": 4.451628053753579, + "learning_rate": 2.9999433985538905e-06, + "loss": 0.6741, + "step": 793 + }, + { + "epoch": 0.032763885450193944, + "grad_norm": 2.259403496228446, + "learning_rate": 2.9999416435855207e-06, + "loss": 0.6065, + "step": 794 + }, + { + "epoch": 0.03280514978955187, + "grad_norm": 5.71430301566135, + "learning_rate": 2.999939861824441e-06, + "loss": 0.6342, + "step": 795 + }, + { + "epoch": 0.0328464141289098, + "grad_norm": 3.178458059305409, + "learning_rate": 2.999938053270683e-06, + "loss": 0.5929, + "step": 796 + }, + { + "epoch": 0.03288767846826772, + "grad_norm": 9.0359271095428, + "learning_rate": 2.9999362179242792e-06, + "loss": 0.6261, + "step": 797 + }, + { + "epoch": 0.03292894280762565, + "grad_norm": 3.5425136767541723, + "learning_rate": 2.9999343557852623e-06, + "loss": 0.6235, + "step": 798 + }, + { + "epoch": 0.03297020714698358, + "grad_norm": 2.6328073851818177, + "learning_rate": 2.999932466853665e-06, + "loss": 0.6482, + "step": 799 + }, + { + "epoch": 0.0330114714863415, + "grad_norm": 5.1717110168811855, + "learning_rate": 2.999930551129522e-06, + "loss": 0.6686, + "step": 800 + }, + { + 
"epoch": 0.03305273582569943, + "grad_norm": 3.3146893950170386, + "learning_rate": 2.9999286086128676e-06, + "loss": 0.5434, + "step": 801 + }, + { + "epoch": 0.033094000165057355, + "grad_norm": 3.863832603474737, + "learning_rate": 2.999926639303735e-06, + "loss": 0.6755, + "step": 802 + }, + { + "epoch": 0.033135264504415285, + "grad_norm": 4.077680372675728, + "learning_rate": 2.999924643202161e-06, + "loss": 0.6016, + "step": 803 + }, + { + "epoch": 0.03317652884377321, + "grad_norm": 3.0901826343259238, + "learning_rate": 2.999922620308181e-06, + "loss": 0.6184, + "step": 804 + }, + { + "epoch": 0.03321779318313114, + "grad_norm": 4.3621087417568765, + "learning_rate": 2.99992057062183e-06, + "loss": 0.6154, + "step": 805 + }, + { + "epoch": 0.03325905752248907, + "grad_norm": 3.21048878254073, + "learning_rate": 2.999918494143146e-06, + "loss": 0.617, + "step": 806 + }, + { + "epoch": 0.03330032186184699, + "grad_norm": 3.494472659398046, + "learning_rate": 2.999916390872165e-06, + "loss": 0.6117, + "step": 807 + }, + { + "epoch": 0.03334158620120492, + "grad_norm": 11.760861953432805, + "learning_rate": 2.9999142608089253e-06, + "loss": 0.5879, + "step": 808 + }, + { + "epoch": 0.03338285054056284, + "grad_norm": 5.601687906133548, + "learning_rate": 2.999912103953464e-06, + "loss": 0.6218, + "step": 809 + }, + { + "epoch": 0.03342411487992077, + "grad_norm": 4.9537950474263885, + "learning_rate": 2.9999099203058215e-06, + "loss": 0.6197, + "step": 810 + }, + { + "epoch": 0.0334653792192787, + "grad_norm": 2.249416773605982, + "learning_rate": 2.999907709866035e-06, + "loss": 0.5879, + "step": 811 + }, + { + "epoch": 0.033506643558636626, + "grad_norm": 4.6468077255069575, + "learning_rate": 2.9999054726341446e-06, + "loss": 0.594, + "step": 812 + }, + { + "epoch": 0.033547907897994556, + "grad_norm": 4.420458503044866, + "learning_rate": 2.9999032086101903e-06, + "loss": 0.6432, + "step": 813 + }, + { + "epoch": 0.03358917223735248, + "grad_norm": 2.7508867551995024, + "learning_rate": 2.9999009177942125e-06, + "loss": 0.6367, + "step": 814 + }, + { + "epoch": 0.03363043657671041, + "grad_norm": 3.0488386074784692, + "learning_rate": 2.9998986001862522e-06, + "loss": 0.5655, + "step": 815 + }, + { + "epoch": 0.03367170091606833, + "grad_norm": 3.2563999691299452, + "learning_rate": 2.999896255786351e-06, + "loss": 0.6294, + "step": 816 + }, + { + "epoch": 0.03371296525542626, + "grad_norm": 4.425221497864233, + "learning_rate": 2.99989388459455e-06, + "loss": 0.6468, + "step": 817 + }, + { + "epoch": 0.03375422959478419, + "grad_norm": 3.1101761201109572, + "learning_rate": 2.9998914866108925e-06, + "loss": 0.5781, + "step": 818 + }, + { + "epoch": 0.033795493934142114, + "grad_norm": 13.207915850696134, + "learning_rate": 2.9998890618354204e-06, + "loss": 0.6035, + "step": 819 + }, + { + "epoch": 0.033836758273500044, + "grad_norm": 5.768448959156861, + "learning_rate": 2.9998866102681776e-06, + "loss": 0.6411, + "step": 820 + }, + { + "epoch": 0.033878022612857966, + "grad_norm": 7.294776851848584, + "learning_rate": 2.999884131909208e-06, + "loss": 0.6181, + "step": 821 + }, + { + "epoch": 0.033919286952215896, + "grad_norm": 4.227820206251395, + "learning_rate": 2.9998816267585556e-06, + "loss": 0.5912, + "step": 822 + }, + { + "epoch": 0.03396055129157382, + "grad_norm": 4.789218415335382, + "learning_rate": 2.9998790948162654e-06, + "loss": 0.5916, + "step": 823 + }, + { + "epoch": 0.03400181563093175, + "grad_norm": 3.4753405714588275, + "learning_rate": 
2.9998765360823823e-06, + "loss": 0.5969, + "step": 824 + }, + { + "epoch": 0.03404307997028968, + "grad_norm": 3.8469646107166455, + "learning_rate": 2.999873950556952e-06, + "loss": 0.6046, + "step": 825 + }, + { + "epoch": 0.0340843443096476, + "grad_norm": 3.8741090356468453, + "learning_rate": 2.9998713382400215e-06, + "loss": 0.6722, + "step": 826 + }, + { + "epoch": 0.03412560864900553, + "grad_norm": 4.9623883725997, + "learning_rate": 2.999868699131636e-06, + "loss": 0.556, + "step": 827 + }, + { + "epoch": 0.034166872988363454, + "grad_norm": 2.597622560411897, + "learning_rate": 2.9998660332318437e-06, + "loss": 0.5742, + "step": 828 + }, + { + "epoch": 0.034208137327721384, + "grad_norm": 3.7232829284939086, + "learning_rate": 2.999863340540692e-06, + "loss": 0.6464, + "step": 829 + }, + { + "epoch": 0.03424940166707931, + "grad_norm": 2.5531456546555584, + "learning_rate": 2.9998606210582285e-06, + "loss": 0.6204, + "step": 830 + }, + { + "epoch": 0.03429066600643724, + "grad_norm": 4.063925063172252, + "learning_rate": 2.999857874784503e-06, + "loss": 0.5775, + "step": 831 + }, + { + "epoch": 0.03433193034579517, + "grad_norm": 2.872561522878191, + "learning_rate": 2.9998551017195633e-06, + "loss": 0.6528, + "step": 832 + }, + { + "epoch": 0.03437319468515309, + "grad_norm": 4.3001341627951835, + "learning_rate": 2.9998523018634594e-06, + "loss": 0.6512, + "step": 833 + }, + { + "epoch": 0.03441445902451102, + "grad_norm": 10.933758803109006, + "learning_rate": 2.999849475216242e-06, + "loss": 0.6439, + "step": 834 + }, + { + "epoch": 0.03445572336386894, + "grad_norm": 18.18391443513992, + "learning_rate": 2.9998466217779605e-06, + "loss": 0.5807, + "step": 835 + }, + { + "epoch": 0.03449698770322687, + "grad_norm": 2.8835225208747324, + "learning_rate": 2.999843741548666e-06, + "loss": 0.6022, + "step": 836 + }, + { + "epoch": 0.034538252042584795, + "grad_norm": 59.92121283947522, + "learning_rate": 2.9998408345284102e-06, + "loss": 0.5745, + "step": 837 + }, + { + "epoch": 0.034579516381942725, + "grad_norm": 5.127671897008567, + "learning_rate": 2.9998379007172454e-06, + "loss": 0.578, + "step": 838 + }, + { + "epoch": 0.034620780721300655, + "grad_norm": 6.825655976987479, + "learning_rate": 2.9998349401152238e-06, + "loss": 0.5989, + "step": 839 + }, + { + "epoch": 0.03466204506065858, + "grad_norm": 3.6835410779334286, + "learning_rate": 2.999831952722398e-06, + "loss": 0.6314, + "step": 840 + }, + { + "epoch": 0.03470330940001651, + "grad_norm": 3.0606372337355086, + "learning_rate": 2.999828938538821e-06, + "loss": 0.6747, + "step": 841 + }, + { + "epoch": 0.03474457373937443, + "grad_norm": 3.5861423162596155, + "learning_rate": 2.9998258975645478e-06, + "loss": 0.6283, + "step": 842 + }, + { + "epoch": 0.03478583807873236, + "grad_norm": 6.229610798321032, + "learning_rate": 2.9998228297996323e-06, + "loss": 0.586, + "step": 843 + }, + { + "epoch": 0.03482710241809028, + "grad_norm": 2.704297626486596, + "learning_rate": 2.999819735244128e-06, + "loss": 0.5921, + "step": 844 + }, + { + "epoch": 0.03486836675744821, + "grad_norm": 7.7372746725776205, + "learning_rate": 2.999816613898092e-06, + "loss": 0.6652, + "step": 845 + }, + { + "epoch": 0.03490963109680614, + "grad_norm": 4.433227744124158, + "learning_rate": 2.9998134657615797e-06, + "loss": 0.606, + "step": 846 + }, + { + "epoch": 0.034950895436164066, + "grad_norm": 2.5261432480741437, + "learning_rate": 2.9998102908346467e-06, + "loss": 0.6078, + "step": 847 + }, + { + "epoch": 0.034992159775521996, + 
"grad_norm": 4.491306131874911, + "learning_rate": 2.9998070891173495e-06, + "loss": 0.5726, + "step": 848 + }, + { + "epoch": 0.03503342411487992, + "grad_norm": 4.007704455195431, + "learning_rate": 2.999803860609746e-06, + "loss": 0.6038, + "step": 849 + }, + { + "epoch": 0.03507468845423785, + "grad_norm": 3.1753610910910512, + "learning_rate": 2.9998006053118937e-06, + "loss": 0.6308, + "step": 850 + }, + { + "epoch": 0.03511595279359577, + "grad_norm": 4.076268449754573, + "learning_rate": 2.9997973232238507e-06, + "loss": 0.6478, + "step": 851 + }, + { + "epoch": 0.0351572171329537, + "grad_norm": 3.1114634068234164, + "learning_rate": 2.999794014345675e-06, + "loss": 0.6717, + "step": 852 + }, + { + "epoch": 0.03519848147231163, + "grad_norm": 5.975763193005756, + "learning_rate": 2.999790678677427e-06, + "loss": 0.6413, + "step": 853 + }, + { + "epoch": 0.035239745811669554, + "grad_norm": 2.8337082437412104, + "learning_rate": 2.9997873162191653e-06, + "loss": 0.5385, + "step": 854 + }, + { + "epoch": 0.035281010151027484, + "grad_norm": 3.8625714905511526, + "learning_rate": 2.9997839269709505e-06, + "loss": 0.6151, + "step": 855 + }, + { + "epoch": 0.03532227449038541, + "grad_norm": 3.731945502847763, + "learning_rate": 2.9997805109328428e-06, + "loss": 0.5752, + "step": 856 + }, + { + "epoch": 0.035363538829743336, + "grad_norm": 5.005088408862152, + "learning_rate": 2.999777068104903e-06, + "loss": 0.6339, + "step": 857 + }, + { + "epoch": 0.03540480316910126, + "grad_norm": 3.5605847669662074, + "learning_rate": 2.999773598487193e-06, + "loss": 0.6318, + "step": 858 + }, + { + "epoch": 0.03544606750845919, + "grad_norm": 2.896179412008992, + "learning_rate": 2.9997701020797746e-06, + "loss": 0.6063, + "step": 859 + }, + { + "epoch": 0.03548733184781712, + "grad_norm": 2.8050361962547035, + "learning_rate": 2.9997665788827106e-06, + "loss": 0.5639, + "step": 860 + }, + { + "epoch": 0.03552859618717504, + "grad_norm": 15.527536818431026, + "learning_rate": 2.9997630288960636e-06, + "loss": 0.6106, + "step": 861 + }, + { + "epoch": 0.03556986052653297, + "grad_norm": 2.9801400066812547, + "learning_rate": 2.9997594521198966e-06, + "loss": 0.6274, + "step": 862 + }, + { + "epoch": 0.035611124865890895, + "grad_norm": 3.0492049797752983, + "learning_rate": 2.9997558485542746e-06, + "loss": 0.6039, + "step": 863 + }, + { + "epoch": 0.035652389205248824, + "grad_norm": 8.890847241379463, + "learning_rate": 2.999752218199261e-06, + "loss": 0.6004, + "step": 864 + }, + { + "epoch": 0.03569365354460675, + "grad_norm": 8.543833032802109, + "learning_rate": 2.999748561054921e-06, + "loss": 0.647, + "step": 865 + }, + { + "epoch": 0.03573491788396468, + "grad_norm": 5.9120500355378, + "learning_rate": 2.99974487712132e-06, + "loss": 0.6459, + "step": 866 + }, + { + "epoch": 0.03577618222332261, + "grad_norm": 2.6502188854334134, + "learning_rate": 2.9997411663985234e-06, + "loss": 0.5933, + "step": 867 + }, + { + "epoch": 0.03581744656268053, + "grad_norm": 3.725503187223495, + "learning_rate": 2.9997374288865984e-06, + "loss": 0.6896, + "step": 868 + }, + { + "epoch": 0.03585871090203846, + "grad_norm": 3.3623283374358954, + "learning_rate": 2.9997336645856106e-06, + "loss": 0.6269, + "step": 869 + }, + { + "epoch": 0.03589997524139638, + "grad_norm": 4.162160681813689, + "learning_rate": 2.999729873495628e-06, + "loss": 0.6082, + "step": 870 + }, + { + "epoch": 0.03594123958075431, + "grad_norm": 8.417035852250859, + "learning_rate": 2.9997260556167184e-06, + "loss": 0.6039, + "step": 
871 + }, + { + "epoch": 0.03598250392011224, + "grad_norm": 3.0187144689079304, + "learning_rate": 2.999722210948949e-06, + "loss": 0.5874, + "step": 872 + }, + { + "epoch": 0.036023768259470165, + "grad_norm": 4.446132411974261, + "learning_rate": 2.99971833949239e-06, + "loss": 0.5463, + "step": 873 + }, + { + "epoch": 0.036065032598828095, + "grad_norm": 3.6323302005559137, + "learning_rate": 2.9997144412471088e-06, + "loss": 0.6122, + "step": 874 + }, + { + "epoch": 0.03610629693818602, + "grad_norm": 4.316187885791084, + "learning_rate": 2.999710516213177e-06, + "loss": 0.5929, + "step": 875 + }, + { + "epoch": 0.03614756127754395, + "grad_norm": 4.063884604008623, + "learning_rate": 2.999706564390663e-06, + "loss": 0.6315, + "step": 876 + }, + { + "epoch": 0.03618882561690187, + "grad_norm": 4.958604660794896, + "learning_rate": 2.9997025857796386e-06, + "loss": 0.6783, + "step": 877 + }, + { + "epoch": 0.0362300899562598, + "grad_norm": 3.96217740572616, + "learning_rate": 2.9996985803801736e-06, + "loss": 0.6157, + "step": 878 + }, + { + "epoch": 0.03627135429561773, + "grad_norm": 13.152788762546958, + "learning_rate": 2.999694548192341e-06, + "loss": 0.653, + "step": 879 + }, + { + "epoch": 0.03631261863497565, + "grad_norm": 6.839329776089883, + "learning_rate": 2.9996904892162115e-06, + "loss": 0.6674, + "step": 880 + }, + { + "epoch": 0.03635388297433358, + "grad_norm": 2.965947264738655, + "learning_rate": 2.999686403451859e-06, + "loss": 0.6476, + "step": 881 + }, + { + "epoch": 0.036395147313691506, + "grad_norm": 2.6535017126904643, + "learning_rate": 2.9996822908993548e-06, + "loss": 0.6515, + "step": 882 + }, + { + "epoch": 0.036436411653049436, + "grad_norm": 3.7967448158293613, + "learning_rate": 2.9996781515587735e-06, + "loss": 0.5525, + "step": 883 + }, + { + "epoch": 0.03647767599240736, + "grad_norm": 3.512769764868646, + "learning_rate": 2.999673985430189e-06, + "loss": 0.6002, + "step": 884 + }, + { + "epoch": 0.03651894033176529, + "grad_norm": 3.0227951207359682, + "learning_rate": 2.9996697925136755e-06, + "loss": 0.5867, + "step": 885 + }, + { + "epoch": 0.03656020467112322, + "grad_norm": 14.730507223419574, + "learning_rate": 2.9996655728093077e-06, + "loss": 0.633, + "step": 886 + }, + { + "epoch": 0.03660146901048114, + "grad_norm": 8.136558979771046, + "learning_rate": 2.9996613263171613e-06, + "loss": 0.5935, + "step": 887 + }, + { + "epoch": 0.03664273334983907, + "grad_norm": 30.13649243847271, + "learning_rate": 2.999657053037312e-06, + "loss": 0.6707, + "step": 888 + }, + { + "epoch": 0.036683997689196994, + "grad_norm": 2.3790060992232203, + "learning_rate": 2.9996527529698366e-06, + "loss": 0.5557, + "step": 889 + }, + { + "epoch": 0.036725262028554924, + "grad_norm": 3.0975778729525194, + "learning_rate": 2.999648426114811e-06, + "loss": 0.6259, + "step": 890 + }, + { + "epoch": 0.03676652636791285, + "grad_norm": 3.558170476146372, + "learning_rate": 2.999644072472313e-06, + "loss": 0.5986, + "step": 891 + }, + { + "epoch": 0.03680779070727078, + "grad_norm": 6.064885278809437, + "learning_rate": 2.9996396920424203e-06, + "loss": 0.6173, + "step": 892 + }, + { + "epoch": 0.036849055046628706, + "grad_norm": 7.289163892523253, + "learning_rate": 2.999635284825211e-06, + "loss": 0.665, + "step": 893 + }, + { + "epoch": 0.03689031938598663, + "grad_norm": 24.226338339229184, + "learning_rate": 2.9996308508207647e-06, + "loss": 0.5902, + "step": 894 + }, + { + "epoch": 0.03693158372534456, + "grad_norm": 3.889035729609812, + "learning_rate": 
2.9996263900291594e-06, + "loss": 0.6428, + "step": 895 + }, + { + "epoch": 0.03697284806470248, + "grad_norm": 5.749165666629113, + "learning_rate": 2.9996219024504755e-06, + "loss": 0.6633, + "step": 896 + }, + { + "epoch": 0.03701411240406041, + "grad_norm": 4.049779461254793, + "learning_rate": 2.9996173880847927e-06, + "loss": 0.5902, + "step": 897 + }, + { + "epoch": 0.037055376743418335, + "grad_norm": 4.4888103252248674, + "learning_rate": 2.999612846932192e-06, + "loss": 0.6228, + "step": 898 + }, + { + "epoch": 0.037096641082776265, + "grad_norm": 10.205923400759959, + "learning_rate": 2.999608278992754e-06, + "loss": 0.61, + "step": 899 + }, + { + "epoch": 0.037137905422134194, + "grad_norm": 5.088445476232115, + "learning_rate": 2.999603684266561e-06, + "loss": 0.5773, + "step": 900 + }, + { + "epoch": 0.03717916976149212, + "grad_norm": 2.509572434842986, + "learning_rate": 2.999599062753695e-06, + "loss": 0.6343, + "step": 901 + }, + { + "epoch": 0.03722043410085005, + "grad_norm": 5.1185345310892965, + "learning_rate": 2.9995944144542383e-06, + "loss": 0.6537, + "step": 902 + }, + { + "epoch": 0.03726169844020797, + "grad_norm": 4.788525320289573, + "learning_rate": 2.999589739368274e-06, + "loss": 0.5612, + "step": 903 + }, + { + "epoch": 0.0373029627795659, + "grad_norm": 8.458357301795752, + "learning_rate": 2.999585037495885e-06, + "loss": 0.672, + "step": 904 + }, + { + "epoch": 0.03734422711892382, + "grad_norm": 4.658013127376937, + "learning_rate": 2.9995803088371563e-06, + "loss": 0.6409, + "step": 905 + }, + { + "epoch": 0.03738549145828175, + "grad_norm": 6.359623080772947, + "learning_rate": 2.9995755533921722e-06, + "loss": 0.6481, + "step": 906 + }, + { + "epoch": 0.03742675579763968, + "grad_norm": 3.338822767987583, + "learning_rate": 2.9995707711610167e-06, + "loss": 0.6459, + "step": 907 + }, + { + "epoch": 0.037468020136997605, + "grad_norm": 4.904005957779167, + "learning_rate": 2.999565962143776e-06, + "loss": 0.5551, + "step": 908 + }, + { + "epoch": 0.037509284476355535, + "grad_norm": 4.140886325504715, + "learning_rate": 2.999561126340536e-06, + "loss": 0.5895, + "step": 909 + }, + { + "epoch": 0.03755054881571346, + "grad_norm": 3.8728494239401323, + "learning_rate": 2.999556263751383e-06, + "loss": 0.6196, + "step": 910 + }, + { + "epoch": 0.03759181315507139, + "grad_norm": 4.754556708990842, + "learning_rate": 2.9995513743764037e-06, + "loss": 0.6052, + "step": 911 + }, + { + "epoch": 0.03763307749442931, + "grad_norm": 2.6540345131755694, + "learning_rate": 2.999546458215686e-06, + "loss": 0.5633, + "step": 912 + }, + { + "epoch": 0.03767434183378724, + "grad_norm": 5.351789023701954, + "learning_rate": 2.9995415152693164e-06, + "loss": 0.6467, + "step": 913 + }, + { + "epoch": 0.03771560617314517, + "grad_norm": 4.587226010943712, + "learning_rate": 2.999536545537385e-06, + "loss": 0.6393, + "step": 914 + }, + { + "epoch": 0.03775687051250309, + "grad_norm": 4.339047389495283, + "learning_rate": 2.9995315490199788e-06, + "loss": 0.6562, + "step": 915 + }, + { + "epoch": 0.03779813485186102, + "grad_norm": 6.033925508050782, + "learning_rate": 2.999526525717188e-06, + "loss": 0.6019, + "step": 916 + }, + { + "epoch": 0.037839399191218946, + "grad_norm": 5.907396714207022, + "learning_rate": 2.999521475629103e-06, + "loss": 0.6337, + "step": 917 + }, + { + "epoch": 0.037880663530576876, + "grad_norm": 3.845795689848923, + "learning_rate": 2.999516398755813e-06, + "loss": 0.6024, + "step": 918 + }, + { + "epoch": 0.0379219278699348, + "grad_norm": 
2.3107725234430867, + "learning_rate": 2.999511295097408e-06, + "loss": 0.5622, + "step": 919 + }, + { + "epoch": 0.03796319220929273, + "grad_norm": 6.232415543099438, + "learning_rate": 2.9995061646539807e-06, + "loss": 0.6515, + "step": 920 + }, + { + "epoch": 0.03800445654865066, + "grad_norm": 11.167642220732583, + "learning_rate": 2.9995010074256224e-06, + "loss": 0.6831, + "step": 921 + }, + { + "epoch": 0.03804572088800858, + "grad_norm": 13.637237019111831, + "learning_rate": 2.999495823412424e-06, + "loss": 0.618, + "step": 922 + }, + { + "epoch": 0.03808698522736651, + "grad_norm": 19.43039477690515, + "learning_rate": 2.9994906126144797e-06, + "loss": 0.6129, + "step": 923 + }, + { + "epoch": 0.038128249566724434, + "grad_norm": 3.9484610131233064, + "learning_rate": 2.999485375031882e-06, + "loss": 0.5902, + "step": 924 + }, + { + "epoch": 0.038169513906082364, + "grad_norm": 2.4261335735493326, + "learning_rate": 2.9994801106647247e-06, + "loss": 0.5873, + "step": 925 + }, + { + "epoch": 0.038210778245440294, + "grad_norm": 4.80600794453795, + "learning_rate": 2.9994748195131004e-06, + "loss": 0.5842, + "step": 926 + }, + { + "epoch": 0.03825204258479822, + "grad_norm": 2.760873194158365, + "learning_rate": 2.9994695015771056e-06, + "loss": 0.6156, + "step": 927 + }, + { + "epoch": 0.03829330692415615, + "grad_norm": 13.738596630118693, + "learning_rate": 2.999464156856834e-06, + "loss": 0.6791, + "step": 928 + }, + { + "epoch": 0.03833457126351407, + "grad_norm": 2.9368307943451373, + "learning_rate": 2.999458785352382e-06, + "loss": 0.6141, + "step": 929 + }, + { + "epoch": 0.038375835602872, + "grad_norm": 2.7377375768707046, + "learning_rate": 2.9994533870638445e-06, + "loss": 0.5871, + "step": 930 + }, + { + "epoch": 0.03841709994222992, + "grad_norm": 3.6784623271115975, + "learning_rate": 2.9994479619913185e-06, + "loss": 0.5879, + "step": 931 + }, + { + "epoch": 0.03845836428158785, + "grad_norm": 4.741905769151594, + "learning_rate": 2.999442510134901e-06, + "loss": 0.6117, + "step": 932 + }, + { + "epoch": 0.03849962862094578, + "grad_norm": 3.129414631617632, + "learning_rate": 2.999437031494689e-06, + "loss": 0.6473, + "step": 933 + }, + { + "epoch": 0.038540892960303705, + "grad_norm": 4.854912666407359, + "learning_rate": 2.999431526070781e-06, + "loss": 0.5648, + "step": 934 + }, + { + "epoch": 0.038582157299661635, + "grad_norm": 4.874803775313819, + "learning_rate": 2.9994259938632746e-06, + "loss": 0.5931, + "step": 935 + }, + { + "epoch": 0.03862342163901956, + "grad_norm": 4.226915383786692, + "learning_rate": 2.9994204348722693e-06, + "loss": 0.581, + "step": 936 + }, + { + "epoch": 0.03866468597837749, + "grad_norm": 3.808112897217757, + "learning_rate": 2.9994148490978636e-06, + "loss": 0.6156, + "step": 937 + }, + { + "epoch": 0.03870595031773541, + "grad_norm": 3.4700177287829965, + "learning_rate": 2.999409236540158e-06, + "loss": 0.6095, + "step": 938 + }, + { + "epoch": 0.03874721465709334, + "grad_norm": 4.9485236863293, + "learning_rate": 2.9994035971992525e-06, + "loss": 0.5815, + "step": 939 + }, + { + "epoch": 0.03878847899645127, + "grad_norm": 4.605639994132548, + "learning_rate": 2.9993979310752477e-06, + "loss": 0.6199, + "step": 940 + }, + { + "epoch": 0.03882974333580919, + "grad_norm": 8.775644524861283, + "learning_rate": 2.999392238168245e-06, + "loss": 0.6205, + "step": 941 + }, + { + "epoch": 0.03887100767516712, + "grad_norm": 3.0092131773120845, + "learning_rate": 2.999386518478346e-06, + "loss": 0.6751, + "step": 942 + }, + { + 
"epoch": 0.038912272014525046, + "grad_norm": 4.695298676409299, + "learning_rate": 2.999380772005653e-06, + "loss": 0.6093, + "step": 943 + }, + { + "epoch": 0.038953536353882975, + "grad_norm": 4.315322245538347, + "learning_rate": 2.9993749987502685e-06, + "loss": 0.637, + "step": 944 + }, + { + "epoch": 0.0389948006932409, + "grad_norm": 3.9653939741923176, + "learning_rate": 2.9993691987122953e-06, + "loss": 0.6171, + "step": 945 + }, + { + "epoch": 0.03903606503259883, + "grad_norm": 4.747088799314899, + "learning_rate": 2.9993633718918374e-06, + "loss": 0.5771, + "step": 946 + }, + { + "epoch": 0.03907732937195676, + "grad_norm": 4.559384033383131, + "learning_rate": 2.9993575182889995e-06, + "loss": 0.627, + "step": 947 + }, + { + "epoch": 0.03911859371131468, + "grad_norm": 7.683318250706434, + "learning_rate": 2.999351637903885e-06, + "loss": 0.6359, + "step": 948 + }, + { + "epoch": 0.03915985805067261, + "grad_norm": 2.4494404431627186, + "learning_rate": 2.9993457307365996e-06, + "loss": 0.6214, + "step": 949 + }, + { + "epoch": 0.039201122390030534, + "grad_norm": 6.531592139658458, + "learning_rate": 2.9993397967872483e-06, + "loss": 0.6575, + "step": 950 + }, + { + "epoch": 0.03924238672938846, + "grad_norm": 2.685492933327545, + "learning_rate": 2.9993338360559378e-06, + "loss": 0.6151, + "step": 951 + }, + { + "epoch": 0.039283651068746386, + "grad_norm": 3.218629093216305, + "learning_rate": 2.9993278485427745e-06, + "loss": 0.6368, + "step": 952 + }, + { + "epoch": 0.039324915408104316, + "grad_norm": 3.003857120069308, + "learning_rate": 2.9993218342478647e-06, + "loss": 0.6295, + "step": 953 + }, + { + "epoch": 0.039366179747462246, + "grad_norm": 3.9297123473351365, + "learning_rate": 2.999315793171316e-06, + "loss": 0.6425, + "step": 954 + }, + { + "epoch": 0.03940744408682017, + "grad_norm": 3.160243786910019, + "learning_rate": 2.9993097253132373e-06, + "loss": 0.6197, + "step": 955 + }, + { + "epoch": 0.0394487084261781, + "grad_norm": 3.4703245525403554, + "learning_rate": 2.999303630673736e-06, + "loss": 0.6798, + "step": 956 + }, + { + "epoch": 0.03948997276553602, + "grad_norm": 5.477692686980718, + "learning_rate": 2.9992975092529206e-06, + "loss": 0.5789, + "step": 957 + }, + { + "epoch": 0.03953123710489395, + "grad_norm": 9.138397374665454, + "learning_rate": 2.9992913610509013e-06, + "loss": 0.6344, + "step": 958 + }, + { + "epoch": 0.039572501444251874, + "grad_norm": 4.965024081281493, + "learning_rate": 2.9992851860677877e-06, + "loss": 0.6038, + "step": 959 + }, + { + "epoch": 0.039613765783609804, + "grad_norm": 3.587261236920227, + "learning_rate": 2.99927898430369e-06, + "loss": 0.6079, + "step": 960 + }, + { + "epoch": 0.039655030122967734, + "grad_norm": 3.34258333158821, + "learning_rate": 2.9992727557587194e-06, + "loss": 0.6261, + "step": 961 + }, + { + "epoch": 0.03969629446232566, + "grad_norm": 5.8374019357836575, + "learning_rate": 2.9992665004329857e-06, + "loss": 0.6201, + "step": 962 + }, + { + "epoch": 0.03973755880168359, + "grad_norm": 3.7347187979646224, + "learning_rate": 2.9992602183266025e-06, + "loss": 0.6634, + "step": 963 + }, + { + "epoch": 0.03977882314104151, + "grad_norm": 11.652736002520566, + "learning_rate": 2.999253909439681e-06, + "loss": 0.5507, + "step": 964 + }, + { + "epoch": 0.03982008748039944, + "grad_norm": 2.747596083392162, + "learning_rate": 2.9992475737723345e-06, + "loss": 0.6371, + "step": 965 + }, + { + "epoch": 0.03986135181975736, + "grad_norm": 6.669395271547374, + "learning_rate": 
2.9992412113246755e-06, + "loss": 0.575, + "step": 966 + }, + { + "epoch": 0.03990261615911529, + "grad_norm": 4.429046804381565, + "learning_rate": 2.9992348220968183e-06, + "loss": 0.6868, + "step": 967 + }, + { + "epoch": 0.03994388049847322, + "grad_norm": 4.15122332665555, + "learning_rate": 2.999228406088876e-06, + "loss": 0.6244, + "step": 968 + }, + { + "epoch": 0.039985144837831145, + "grad_norm": 2.9752374799972126, + "learning_rate": 2.999221963300964e-06, + "loss": 0.6107, + "step": 969 + }, + { + "epoch": 0.040026409177189075, + "grad_norm": 2.679227545797541, + "learning_rate": 2.999215493733198e-06, + "loss": 0.5375, + "step": 970 + }, + { + "epoch": 0.040067673516547, + "grad_norm": 3.982454791624781, + "learning_rate": 2.999208997385692e-06, + "loss": 0.5528, + "step": 971 + }, + { + "epoch": 0.04010893785590493, + "grad_norm": 2.7627386353515844, + "learning_rate": 2.999202474258563e-06, + "loss": 0.6356, + "step": 972 + }, + { + "epoch": 0.04015020219526285, + "grad_norm": 3.081204187024519, + "learning_rate": 2.999195924351928e-06, + "loss": 0.6548, + "step": 973 + }, + { + "epoch": 0.04019146653462078, + "grad_norm": 5.224282051087285, + "learning_rate": 2.9991893476659026e-06, + "loss": 0.6365, + "step": 974 + }, + { + "epoch": 0.04023273087397871, + "grad_norm": 3.111383793531352, + "learning_rate": 2.9991827442006055e-06, + "loss": 0.6358, + "step": 975 + }, + { + "epoch": 0.04027399521333663, + "grad_norm": 4.345929919385973, + "learning_rate": 2.999176113956154e-06, + "loss": 0.5814, + "step": 976 + }, + { + "epoch": 0.04031525955269456, + "grad_norm": 2.4475454563179246, + "learning_rate": 2.999169456932667e-06, + "loss": 0.6192, + "step": 977 + }, + { + "epoch": 0.040356523892052486, + "grad_norm": 4.435471771625156, + "learning_rate": 2.9991627731302626e-06, + "loss": 0.6426, + "step": 978 + }, + { + "epoch": 0.040397788231410416, + "grad_norm": 2.353515399189526, + "learning_rate": 2.9991560625490612e-06, + "loss": 0.5796, + "step": 979 + }, + { + "epoch": 0.040439052570768345, + "grad_norm": 16.070835530345335, + "learning_rate": 2.9991493251891817e-06, + "loss": 0.7225, + "step": 980 + }, + { + "epoch": 0.04048031691012627, + "grad_norm": 3.022434853075375, + "learning_rate": 2.999142561050746e-06, + "loss": 0.6412, + "step": 981 + }, + { + "epoch": 0.0405215812494842, + "grad_norm": 3.0157884232294565, + "learning_rate": 2.999135770133873e-06, + "loss": 0.6607, + "step": 982 + }, + { + "epoch": 0.04056284558884212, + "grad_norm": 7.14917404111935, + "learning_rate": 2.9991289524386854e-06, + "loss": 0.6036, + "step": 983 + }, + { + "epoch": 0.04060410992820005, + "grad_norm": 8.932275814388527, + "learning_rate": 2.9991221079653038e-06, + "loss": 0.6192, + "step": 984 + }, + { + "epoch": 0.040645374267557974, + "grad_norm": 8.488032379320071, + "learning_rate": 2.9991152367138514e-06, + "loss": 0.5512, + "step": 985 + }, + { + "epoch": 0.040686638606915904, + "grad_norm": 7.0193469883012645, + "learning_rate": 2.999108338684451e-06, + "loss": 0.6125, + "step": 986 + }, + { + "epoch": 0.04072790294627383, + "grad_norm": 2.956561469290465, + "learning_rate": 2.9991014138772256e-06, + "loss": 0.6209, + "step": 987 + }, + { + "epoch": 0.040769167285631756, + "grad_norm": 3.8885665371797, + "learning_rate": 2.999094462292298e-06, + "loss": 0.6445, + "step": 988 + }, + { + "epoch": 0.040810431624989686, + "grad_norm": 2.6596010362439357, + "learning_rate": 2.999087483929794e-06, + "loss": 0.6118, + "step": 989 + }, + { + "epoch": 0.04085169596434761, + 
"grad_norm": 9.229862163027255, + "learning_rate": 2.9990804787898365e-06, + "loss": 0.642, + "step": 990 + }, + { + "epoch": 0.04089296030370554, + "grad_norm": 3.634696542941843, + "learning_rate": 2.999073446872552e-06, + "loss": 0.5602, + "step": 991 + }, + { + "epoch": 0.04093422464306346, + "grad_norm": 7.699697288430596, + "learning_rate": 2.9990663881780656e-06, + "loss": 0.6173, + "step": 992 + }, + { + "epoch": 0.04097548898242139, + "grad_norm": 10.504080830582199, + "learning_rate": 2.9990593027065036e-06, + "loss": 0.6385, + "step": 993 + }, + { + "epoch": 0.04101675332177932, + "grad_norm": 3.5326656241532706, + "learning_rate": 2.999052190457992e-06, + "loss": 0.6052, + "step": 994 + }, + { + "epoch": 0.041058017661137244, + "grad_norm": 4.482177377883384, + "learning_rate": 2.9990450514326586e-06, + "loss": 0.5916, + "step": 995 + }, + { + "epoch": 0.041099282000495174, + "grad_norm": 3.2308293883103345, + "learning_rate": 2.99903788563063e-06, + "loss": 0.5986, + "step": 996 + }, + { + "epoch": 0.0411405463398531, + "grad_norm": 3.523057750850772, + "learning_rate": 2.9990306930520356e-06, + "loss": 0.5701, + "step": 997 + }, + { + "epoch": 0.04118181067921103, + "grad_norm": 2.961250871150576, + "learning_rate": 2.999023473697002e-06, + "loss": 0.6374, + "step": 998 + }, + { + "epoch": 0.04122307501856895, + "grad_norm": 4.1372644151949975, + "learning_rate": 2.99901622756566e-06, + "loss": 0.6058, + "step": 999 + }, + { + "epoch": 0.04126433935792688, + "grad_norm": 11.104194708345283, + "learning_rate": 2.9990089546581383e-06, + "loss": 0.5713, + "step": 1000 + }, + { + "epoch": 0.04130560369728481, + "grad_norm": 3.9182223685105737, + "learning_rate": 2.9990016549745663e-06, + "loss": 0.6471, + "step": 1001 + }, + { + "epoch": 0.04134686803664273, + "grad_norm": 2.9300446422486672, + "learning_rate": 2.998994328515075e-06, + "loss": 0.65, + "step": 1002 + }, + { + "epoch": 0.04138813237600066, + "grad_norm": 4.579370305336834, + "learning_rate": 2.9989869752797948e-06, + "loss": 0.6159, + "step": 1003 + }, + { + "epoch": 0.041429396715358585, + "grad_norm": 5.3592139136460215, + "learning_rate": 2.9989795952688574e-06, + "loss": 0.5897, + "step": 1004 + }, + { + "epoch": 0.041470661054716515, + "grad_norm": 4.634031636464021, + "learning_rate": 2.9989721884823945e-06, + "loss": 0.576, + "step": 1005 + }, + { + "epoch": 0.04151192539407444, + "grad_norm": 3.0538290771393446, + "learning_rate": 2.9989647549205387e-06, + "loss": 0.5857, + "step": 1006 + }, + { + "epoch": 0.04155318973343237, + "grad_norm": 16.695126795168672, + "learning_rate": 2.9989572945834226e-06, + "loss": 0.6028, + "step": 1007 + }, + { + "epoch": 0.0415944540727903, + "grad_norm": 3.5600464376093797, + "learning_rate": 2.998949807471179e-06, + "loss": 0.5672, + "step": 1008 + }, + { + "epoch": 0.04163571841214822, + "grad_norm": 3.259072131170457, + "learning_rate": 2.9989422935839426e-06, + "loss": 0.5987, + "step": 1009 + }, + { + "epoch": 0.04167698275150615, + "grad_norm": 4.44731204606794, + "learning_rate": 2.9989347529218463e-06, + "loss": 0.5906, + "step": 1010 + }, + { + "epoch": 0.04171824709086407, + "grad_norm": 3.2136792452646454, + "learning_rate": 2.9989271854850263e-06, + "loss": 0.6414, + "step": 1011 + }, + { + "epoch": 0.041759511430222, + "grad_norm": 2.559814680264459, + "learning_rate": 2.998919591273617e-06, + "loss": 0.6371, + "step": 1012 + }, + { + "epoch": 0.041800775769579926, + "grad_norm": 3.063324022832366, + "learning_rate": 2.9989119702877534e-06, + "loss": 0.5675, 
+ "step": 1013 + }, + { + "epoch": 0.041842040108937856, + "grad_norm": 3.37448024350652, + "learning_rate": 2.9989043225275728e-06, + "loss": 0.5982, + "step": 1014 + }, + { + "epoch": 0.041883304448295786, + "grad_norm": 2.9291047382431348, + "learning_rate": 2.9988966479932114e-06, + "loss": 0.6227, + "step": 1015 + }, + { + "epoch": 0.04192456878765371, + "grad_norm": 2.783933043683264, + "learning_rate": 2.998888946684806e-06, + "loss": 0.6265, + "step": 1016 + }, + { + "epoch": 0.04196583312701164, + "grad_norm": 4.109312774457752, + "learning_rate": 2.998881218602494e-06, + "loss": 0.6001, + "step": 1017 + }, + { + "epoch": 0.04200709746636956, + "grad_norm": 3.9039662731505227, + "learning_rate": 2.9988734637464145e-06, + "loss": 0.6141, + "step": 1018 + }, + { + "epoch": 0.04204836180572749, + "grad_norm": 3.1204731174227582, + "learning_rate": 2.998865682116705e-06, + "loss": 0.5505, + "step": 1019 + }, + { + "epoch": 0.042089626145085414, + "grad_norm": 6.204452715561726, + "learning_rate": 2.998857873713505e-06, + "loss": 0.6386, + "step": 1020 + }, + { + "epoch": 0.042130890484443344, + "grad_norm": 8.569855821484815, + "learning_rate": 2.9988500385369537e-06, + "loss": 0.6187, + "step": 1021 + }, + { + "epoch": 0.042172154823801274, + "grad_norm": 6.098839728484866, + "learning_rate": 2.998842176587191e-06, + "loss": 0.6068, + "step": 1022 + }, + { + "epoch": 0.042213419163159197, + "grad_norm": 3.8135354897425096, + "learning_rate": 2.9988342878643577e-06, + "loss": 0.6298, + "step": 1023 + }, + { + "epoch": 0.042254683502517126, + "grad_norm": 5.688776566694358, + "learning_rate": 2.9988263723685944e-06, + "loss": 0.5712, + "step": 1024 + }, + { + "epoch": 0.04229594784187505, + "grad_norm": 3.1506365758417467, + "learning_rate": 2.9988184301000428e-06, + "loss": 0.6217, + "step": 1025 + }, + { + "epoch": 0.04233721218123298, + "grad_norm": 10.527017578129932, + "learning_rate": 2.998810461058844e-06, + "loss": 0.564, + "step": 1026 + }, + { + "epoch": 0.0423784765205909, + "grad_norm": 3.5522636415361726, + "learning_rate": 2.9988024652451414e-06, + "loss": 0.5649, + "step": 1027 + }, + { + "epoch": 0.04241974085994883, + "grad_norm": 7.681256100316081, + "learning_rate": 2.9987944426590775e-06, + "loss": 0.6289, + "step": 1028 + }, + { + "epoch": 0.04246100519930676, + "grad_norm": 4.815530874198276, + "learning_rate": 2.998786393300795e-06, + "loss": 0.613, + "step": 1029 + }, + { + "epoch": 0.042502269538664685, + "grad_norm": 2.651988872574372, + "learning_rate": 2.9987783171704383e-06, + "loss": 0.5402, + "step": 1030 + }, + { + "epoch": 0.042543533878022614, + "grad_norm": 3.6398242207451803, + "learning_rate": 2.9987702142681517e-06, + "loss": 0.5343, + "step": 1031 + }, + { + "epoch": 0.04258479821738054, + "grad_norm": 12.02417020453265, + "learning_rate": 2.9987620845940794e-06, + "loss": 0.5748, + "step": 1032 + }, + { + "epoch": 0.04262606255673847, + "grad_norm": 13.597003316250118, + "learning_rate": 2.9987539281483673e-06, + "loss": 0.6205, + "step": 1033 + }, + { + "epoch": 0.0426673268960964, + "grad_norm": 5.4535549739556775, + "learning_rate": 2.998745744931161e-06, + "loss": 0.5992, + "step": 1034 + }, + { + "epoch": 0.04270859123545432, + "grad_norm": 5.893432362019419, + "learning_rate": 2.9987375349426056e-06, + "loss": 0.603, + "step": 1035 + }, + { + "epoch": 0.04274985557481225, + "grad_norm": 3.6375350424279356, + "learning_rate": 2.9987292981828485e-06, + "loss": 0.5752, + "step": 1036 + }, + { + "epoch": 0.04279111991417017, + "grad_norm": 
4.197536480464535, + "learning_rate": 2.9987210346520375e-06, + "loss": 0.5964, + "step": 1037 + }, + { + "epoch": 0.0428323842535281, + "grad_norm": 6.701784497336075, + "learning_rate": 2.998712744350319e-06, + "loss": 0.6575, + "step": 1038 + }, + { + "epoch": 0.042873648592886025, + "grad_norm": 5.5599820667981055, + "learning_rate": 2.9987044272778427e-06, + "loss": 0.6131, + "step": 1039 + }, + { + "epoch": 0.042914912932243955, + "grad_norm": 2.891335811738777, + "learning_rate": 2.998696083434755e-06, + "loss": 0.6036, + "step": 1040 + }, + { + "epoch": 0.042956177271601885, + "grad_norm": 4.647227877341317, + "learning_rate": 2.998687712821207e-06, + "loss": 0.6586, + "step": 1041 + }, + { + "epoch": 0.04299744161095981, + "grad_norm": 3.1410246479920403, + "learning_rate": 2.998679315437347e-06, + "loss": 0.5684, + "step": 1042 + }, + { + "epoch": 0.04303870595031774, + "grad_norm": 3.1947741263414433, + "learning_rate": 2.9986708912833246e-06, + "loss": 0.6038, + "step": 1043 + }, + { + "epoch": 0.04307997028967566, + "grad_norm": 7.200976793246015, + "learning_rate": 2.9986624403592918e-06, + "loss": 0.6114, + "step": 1044 + }, + { + "epoch": 0.04312123462903359, + "grad_norm": 37.59358915657536, + "learning_rate": 2.9986539626653984e-06, + "loss": 0.6535, + "step": 1045 + }, + { + "epoch": 0.04316249896839151, + "grad_norm": 2.731275306961385, + "learning_rate": 2.998645458201796e-06, + "loss": 0.6457, + "step": 1046 + }, + { + "epoch": 0.04320376330774944, + "grad_norm": 5.62647037577719, + "learning_rate": 2.9986369269686375e-06, + "loss": 0.6697, + "step": 1047 + }, + { + "epoch": 0.04324502764710737, + "grad_norm": 4.743550283332412, + "learning_rate": 2.9986283689660736e-06, + "loss": 0.5913, + "step": 1048 + }, + { + "epoch": 0.043286291986465296, + "grad_norm": 3.489851845130857, + "learning_rate": 2.9986197841942583e-06, + "loss": 0.6403, + "step": 1049 + }, + { + "epoch": 0.043327556325823226, + "grad_norm": 2.5905589587136073, + "learning_rate": 2.998611172653345e-06, + "loss": 0.6023, + "step": 1050 + }, + { + "epoch": 0.04336882066518115, + "grad_norm": 10.460092465849224, + "learning_rate": 2.998602534343487e-06, + "loss": 0.5632, + "step": 1051 + }, + { + "epoch": 0.04341008500453908, + "grad_norm": 2.202190587949813, + "learning_rate": 2.998593869264839e-06, + "loss": 0.6189, + "step": 1052 + }, + { + "epoch": 0.043451349343897, + "grad_norm": 3.2464956578182163, + "learning_rate": 2.998585177417555e-06, + "loss": 0.593, + "step": 1053 + }, + { + "epoch": 0.04349261368325493, + "grad_norm": 4.586749056545372, + "learning_rate": 2.9985764588017916e-06, + "loss": 0.5815, + "step": 1054 + }, + { + "epoch": 0.04353387802261286, + "grad_norm": 4.973629799666119, + "learning_rate": 2.9985677134177033e-06, + "loss": 0.6236, + "step": 1055 + }, + { + "epoch": 0.043575142361970784, + "grad_norm": 3.909488611337619, + "learning_rate": 2.9985589412654466e-06, + "loss": 0.6119, + "step": 1056 + }, + { + "epoch": 0.043616406701328714, + "grad_norm": 51.9094663516081, + "learning_rate": 2.998550142345179e-06, + "loss": 0.6743, + "step": 1057 + }, + { + "epoch": 0.04365767104068664, + "grad_norm": 3.51410940835486, + "learning_rate": 2.9985413166570567e-06, + "loss": 0.6622, + "step": 1058 + }, + { + "epoch": 0.043698935380044567, + "grad_norm": 2.869208117346717, + "learning_rate": 2.9985324642012374e-06, + "loss": 0.5863, + "step": 1059 + }, + { + "epoch": 0.04374019971940249, + "grad_norm": 3.4252345609757677, + "learning_rate": 2.99852358497788e-06, + "loss": 0.5962, + 
"step": 1060 + }, + { + "epoch": 0.04378146405876042, + "grad_norm": 3.528449225312518, + "learning_rate": 2.9985146789871426e-06, + "loss": 0.625, + "step": 1061 + }, + { + "epoch": 0.04382272839811835, + "grad_norm": 5.802719002352164, + "learning_rate": 2.998505746229184e-06, + "loss": 0.6134, + "step": 1062 + }, + { + "epoch": 0.04386399273747627, + "grad_norm": 3.9997079246833898, + "learning_rate": 2.998496786704164e-06, + "loss": 0.5835, + "step": 1063 + }, + { + "epoch": 0.0439052570768342, + "grad_norm": 3.84881815684763, + "learning_rate": 2.9984878004122437e-06, + "loss": 0.586, + "step": 1064 + }, + { + "epoch": 0.043946521416192125, + "grad_norm": 5.109138431880396, + "learning_rate": 2.9984787873535816e-06, + "loss": 0.5942, + "step": 1065 + }, + { + "epoch": 0.043987785755550055, + "grad_norm": 3.3786643400496903, + "learning_rate": 2.9984697475283404e-06, + "loss": 0.602, + "step": 1066 + }, + { + "epoch": 0.04402905009490798, + "grad_norm": 3.767300270716405, + "learning_rate": 2.9984606809366802e-06, + "loss": 0.5801, + "step": 1067 + }, + { + "epoch": 0.04407031443426591, + "grad_norm": 2.977986732704424, + "learning_rate": 2.9984515875787645e-06, + "loss": 0.587, + "step": 1068 + }, + { + "epoch": 0.04411157877362384, + "grad_norm": 3.0865399015545782, + "learning_rate": 2.9984424674547546e-06, + "loss": 0.6774, + "step": 1069 + }, + { + "epoch": 0.04415284311298176, + "grad_norm": 2.95353059604436, + "learning_rate": 2.9984333205648135e-06, + "loss": 0.5948, + "step": 1070 + }, + { + "epoch": 0.04419410745233969, + "grad_norm": 4.813672262775333, + "learning_rate": 2.998424146909105e-06, + "loss": 0.5959, + "step": 1071 + }, + { + "epoch": 0.04423537179169761, + "grad_norm": 3.3735589918967372, + "learning_rate": 2.9984149464877925e-06, + "loss": 0.5887, + "step": 1072 + }, + { + "epoch": 0.04427663613105554, + "grad_norm": 8.583726761507293, + "learning_rate": 2.9984057193010406e-06, + "loss": 0.6349, + "step": 1073 + }, + { + "epoch": 0.044317900470413465, + "grad_norm": 4.809039614929171, + "learning_rate": 2.998396465349014e-06, + "loss": 0.6037, + "step": 1074 + }, + { + "epoch": 0.044359164809771395, + "grad_norm": 3.9806179733464226, + "learning_rate": 2.9983871846318786e-06, + "loss": 0.5998, + "step": 1075 + }, + { + "epoch": 0.044400429149129325, + "grad_norm": 3.2551089899562053, + "learning_rate": 2.9983778771497995e-06, + "loss": 0.5725, + "step": 1076 + }, + { + "epoch": 0.04444169348848725, + "grad_norm": 3.922707457063375, + "learning_rate": 2.998368542902943e-06, + "loss": 0.6358, + "step": 1077 + }, + { + "epoch": 0.04448295782784518, + "grad_norm": 3.3779663826431303, + "learning_rate": 2.9983591818914764e-06, + "loss": 0.5909, + "step": 1078 + }, + { + "epoch": 0.0445242221672031, + "grad_norm": 4.929894294716991, + "learning_rate": 2.998349794115566e-06, + "loss": 0.6359, + "step": 1079 + }, + { + "epoch": 0.04456548650656103, + "grad_norm": 3.214534915154392, + "learning_rate": 2.9983403795753804e-06, + "loss": 0.6203, + "step": 1080 + }, + { + "epoch": 0.044606750845918954, + "grad_norm": 2.6919061849946226, + "learning_rate": 2.9983309382710865e-06, + "loss": 0.5734, + "step": 1081 + }, + { + "epoch": 0.04464801518527688, + "grad_norm": 5.4866928628776845, + "learning_rate": 2.9983214702028546e-06, + "loss": 0.5741, + "step": 1082 + }, + { + "epoch": 0.04468927952463481, + "grad_norm": 4.971395241546023, + "learning_rate": 2.9983119753708527e-06, + "loss": 0.6319, + "step": 1083 + }, + { + "epoch": 0.044730543863992736, + "grad_norm": 
7.803518588571939, + "learning_rate": 2.998302453775251e-06, + "loss": 0.5299, + "step": 1084 + }, + { + "epoch": 0.044771808203350666, + "grad_norm": 10.774080679016075, + "learning_rate": 2.998292905416219e-06, + "loss": 0.607, + "step": 1085 + }, + { + "epoch": 0.04481307254270859, + "grad_norm": 2.820328637989205, + "learning_rate": 2.9982833302939277e-06, + "loss": 0.5641, + "step": 1086 + }, + { + "epoch": 0.04485433688206652, + "grad_norm": 2.3816527002714087, + "learning_rate": 2.998273728408548e-06, + "loss": 0.6272, + "step": 1087 + }, + { + "epoch": 0.04489560122142444, + "grad_norm": 2.1923721559839544, + "learning_rate": 2.998264099760251e-06, + "loss": 0.5928, + "step": 1088 + }, + { + "epoch": 0.04493686556078237, + "grad_norm": 5.557223210891938, + "learning_rate": 2.9982544443492097e-06, + "loss": 0.5792, + "step": 1089 + }, + { + "epoch": 0.0449781299001403, + "grad_norm": 5.111944486327941, + "learning_rate": 2.9982447621755958e-06, + "loss": 0.5435, + "step": 1090 + }, + { + "epoch": 0.045019394239498224, + "grad_norm": 3.655488234771448, + "learning_rate": 2.9982350532395828e-06, + "loss": 0.6529, + "step": 1091 + }, + { + "epoch": 0.045060658578856154, + "grad_norm": 5.650004048725721, + "learning_rate": 2.998225317541343e-06, + "loss": 0.5984, + "step": 1092 + }, + { + "epoch": 0.04510192291821408, + "grad_norm": 2.6904573785939703, + "learning_rate": 2.9982155550810515e-06, + "loss": 0.5718, + "step": 1093 + }, + { + "epoch": 0.04514318725757201, + "grad_norm": 3.6868265051456954, + "learning_rate": 2.998205765858882e-06, + "loss": 0.5628, + "step": 1094 + }, + { + "epoch": 0.045184451596929937, + "grad_norm": 3.080541426796186, + "learning_rate": 2.9981959498750096e-06, + "loss": 0.6577, + "step": 1095 + }, + { + "epoch": 0.04522571593628786, + "grad_norm": 4.511519519760433, + "learning_rate": 2.9981861071296096e-06, + "loss": 0.6346, + "step": 1096 + }, + { + "epoch": 0.04526698027564579, + "grad_norm": 2.9088351486037727, + "learning_rate": 2.9981762376228577e-06, + "loss": 0.6137, + "step": 1097 + }, + { + "epoch": 0.04530824461500371, + "grad_norm": 4.981801080096467, + "learning_rate": 2.9981663413549307e-06, + "loss": 0.6914, + "step": 1098 + }, + { + "epoch": 0.04534950895436164, + "grad_norm": 2.446254195137889, + "learning_rate": 2.998156418326005e-06, + "loss": 0.6184, + "step": 1099 + }, + { + "epoch": 0.045390773293719565, + "grad_norm": 3.1791391048067577, + "learning_rate": 2.9981464685362576e-06, + "loss": 0.671, + "step": 1100 + }, + { + "epoch": 0.045432037633077495, + "grad_norm": 5.280964729421197, + "learning_rate": 2.998136491985867e-06, + "loss": 0.6804, + "step": 1101 + }, + { + "epoch": 0.045473301972435425, + "grad_norm": 2.810773255583651, + "learning_rate": 2.9981264886750095e-06, + "loss": 0.5974, + "step": 1102 + }, + { + "epoch": 0.04551456631179335, + "grad_norm": 6.478639807257423, + "learning_rate": 2.9981164586038666e-06, + "loss": 0.6249, + "step": 1103 + }, + { + "epoch": 0.04555583065115128, + "grad_norm": 2.7302506547437693, + "learning_rate": 2.9981064017726156e-06, + "loss": 0.6436, + "step": 1104 + }, + { + "epoch": 0.0455970949905092, + "grad_norm": 4.2415071601137795, + "learning_rate": 2.9980963181814367e-06, + "loss": 0.6091, + "step": 1105 + }, + { + "epoch": 0.04563835932986713, + "grad_norm": 6.947322462418311, + "learning_rate": 2.9980862078305096e-06, + "loss": 0.5493, + "step": 1106 + }, + { + "epoch": 0.04567962366922505, + "grad_norm": 4.657943504598035, + "learning_rate": 2.9980760707200156e-06, + "loss": 
0.5868, + "step": 1107 + }, + { + "epoch": 0.04572088800858298, + "grad_norm": 5.292192897237012, + "learning_rate": 2.998065906850135e-06, + "loss": 0.6033, + "step": 1108 + }, + { + "epoch": 0.04576215234794091, + "grad_norm": 3.4310083695987297, + "learning_rate": 2.9980557162210498e-06, + "loss": 0.6084, + "step": 1109 + }, + { + "epoch": 0.045803416687298835, + "grad_norm": 2.258183871523652, + "learning_rate": 2.9980454988329417e-06, + "loss": 0.5526, + "step": 1110 + }, + { + "epoch": 0.045844681026656765, + "grad_norm": 6.514592091179378, + "learning_rate": 2.998035254685994e-06, + "loss": 0.6535, + "step": 1111 + }, + { + "epoch": 0.04588594536601469, + "grad_norm": 3.581361862086735, + "learning_rate": 2.998024983780389e-06, + "loss": 0.5559, + "step": 1112 + }, + { + "epoch": 0.04592720970537262, + "grad_norm": 3.8811454578420777, + "learning_rate": 2.99801468611631e-06, + "loss": 0.543, + "step": 1113 + }, + { + "epoch": 0.04596847404473054, + "grad_norm": 3.163356171923323, + "learning_rate": 2.9980043616939415e-06, + "loss": 0.5915, + "step": 1114 + }, + { + "epoch": 0.04600973838408847, + "grad_norm": 4.281643713820787, + "learning_rate": 2.9979940105134676e-06, + "loss": 0.5951, + "step": 1115 + }, + { + "epoch": 0.0460510027234464, + "grad_norm": 3.158175051583453, + "learning_rate": 2.997983632575073e-06, + "loss": 0.5718, + "step": 1116 + }, + { + "epoch": 0.046092267062804324, + "grad_norm": 6.1129108895864865, + "learning_rate": 2.9979732278789435e-06, + "loss": 0.5708, + "step": 1117 + }, + { + "epoch": 0.04613353140216225, + "grad_norm": 4.126590374197748, + "learning_rate": 2.997962796425265e-06, + "loss": 0.6356, + "step": 1118 + }, + { + "epoch": 0.046174795741520176, + "grad_norm": 4.236579188278658, + "learning_rate": 2.9979523382142236e-06, + "loss": 0.6997, + "step": 1119 + }, + { + "epoch": 0.046216060080878106, + "grad_norm": 15.29131157725388, + "learning_rate": 2.9979418532460057e-06, + "loss": 0.63, + "step": 1120 + }, + { + "epoch": 0.04625732442023603, + "grad_norm": 4.420951859557349, + "learning_rate": 2.9979313415207997e-06, + "loss": 0.6172, + "step": 1121 + }, + { + "epoch": 0.04629858875959396, + "grad_norm": 3.1103558059526186, + "learning_rate": 2.9979208030387923e-06, + "loss": 0.6528, + "step": 1122 + }, + { + "epoch": 0.04633985309895189, + "grad_norm": 3.8683999707457395, + "learning_rate": 2.997910237800172e-06, + "loss": 0.5432, + "step": 1123 + }, + { + "epoch": 0.04638111743830981, + "grad_norm": 7.282752452352119, + "learning_rate": 2.997899645805128e-06, + "loss": 0.5585, + "step": 1124 + }, + { + "epoch": 0.04642238177766774, + "grad_norm": 2.7890542967922554, + "learning_rate": 2.997889027053849e-06, + "loss": 0.5805, + "step": 1125 + }, + { + "epoch": 0.046463646117025664, + "grad_norm": 4.993925792723188, + "learning_rate": 2.997878381546525e-06, + "loss": 0.5906, + "step": 1126 + }, + { + "epoch": 0.046504910456383594, + "grad_norm": 3.7140948121509467, + "learning_rate": 2.997867709283346e-06, + "loss": 0.6358, + "step": 1127 + }, + { + "epoch": 0.04654617479574152, + "grad_norm": 50.8061769699492, + "learning_rate": 2.9978570102645024e-06, + "loss": 0.6184, + "step": 1128 + }, + { + "epoch": 0.04658743913509945, + "grad_norm": 4.320816461348566, + "learning_rate": 2.9978462844901857e-06, + "loss": 0.5895, + "step": 1129 + }, + { + "epoch": 0.04662870347445738, + "grad_norm": 18.384388626459632, + "learning_rate": 2.9978355319605868e-06, + "loss": 0.59, + "step": 1130 + }, + { + "epoch": 0.0466699678138153, + "grad_norm": 
3.5087472355480234, + "learning_rate": 2.9978247526758987e-06, + "loss": 0.5898, + "step": 1131 + }, + { + "epoch": 0.04671123215317323, + "grad_norm": 14.435546116842156, + "learning_rate": 2.9978139466363135e-06, + "loss": 0.5444, + "step": 1132 + }, + { + "epoch": 0.04675249649253115, + "grad_norm": 4.1217012441726375, + "learning_rate": 2.997803113842024e-06, + "loss": 0.5716, + "step": 1133 + }, + { + "epoch": 0.04679376083188908, + "grad_norm": 2.5764450563220995, + "learning_rate": 2.997792254293224e-06, + "loss": 0.6527, + "step": 1134 + }, + { + "epoch": 0.046835025171247005, + "grad_norm": 4.1771331500974, + "learning_rate": 2.997781367990108e-06, + "loss": 0.5832, + "step": 1135 + }, + { + "epoch": 0.046876289510604935, + "grad_norm": 3.4291069867429176, + "learning_rate": 2.9977704549328697e-06, + "loss": 0.6195, + "step": 1136 + }, + { + "epoch": 0.046917553849962865, + "grad_norm": 2.9858730018107904, + "learning_rate": 2.997759515121704e-06, + "loss": 0.5994, + "step": 1137 + }, + { + "epoch": 0.04695881818932079, + "grad_norm": 2.9372391532885085, + "learning_rate": 2.997748548556807e-06, + "loss": 0.5809, + "step": 1138 + }, + { + "epoch": 0.04700008252867872, + "grad_norm": 2.6229581656563177, + "learning_rate": 2.9977375552383734e-06, + "loss": 0.6367, + "step": 1139 + }, + { + "epoch": 0.04704134686803664, + "grad_norm": 2.64492620597743, + "learning_rate": 2.997726535166601e-06, + "loss": 0.5621, + "step": 1140 + }, + { + "epoch": 0.04708261120739457, + "grad_norm": 5.191802549838599, + "learning_rate": 2.9977154883416854e-06, + "loss": 0.6461, + "step": 1141 + }, + { + "epoch": 0.04712387554675249, + "grad_norm": 2.3606958613375575, + "learning_rate": 2.997704414763825e-06, + "loss": 0.5973, + "step": 1142 + }, + { + "epoch": 0.04716513988611042, + "grad_norm": 4.0710867129146635, + "learning_rate": 2.997693314433216e-06, + "loss": 0.6685, + "step": 1143 + }, + { + "epoch": 0.04720640422546835, + "grad_norm": 4.1113307903125795, + "learning_rate": 2.997682187350059e-06, + "loss": 0.5862, + "step": 1144 + }, + { + "epoch": 0.047247668564826276, + "grad_norm": 4.080142243315728, + "learning_rate": 2.997671033514551e-06, + "loss": 0.6231, + "step": 1145 + }, + { + "epoch": 0.047288932904184205, + "grad_norm": 3.967227327351273, + "learning_rate": 2.997659852926892e-06, + "loss": 0.6088, + "step": 1146 + }, + { + "epoch": 0.04733019724354213, + "grad_norm": 5.652121441195586, + "learning_rate": 2.9976486455872806e-06, + "loss": 0.6698, + "step": 1147 + }, + { + "epoch": 0.04737146158290006, + "grad_norm": 2.3265847214916833, + "learning_rate": 2.997637411495919e-06, + "loss": 0.6503, + "step": 1148 + }, + { + "epoch": 0.04741272592225799, + "grad_norm": 3.148753270602181, + "learning_rate": 2.997626150653006e-06, + "loss": 0.6027, + "step": 1149 + }, + { + "epoch": 0.04745399026161591, + "grad_norm": 6.806822129809211, + "learning_rate": 2.9976148630587436e-06, + "loss": 0.6044, + "step": 1150 + }, + { + "epoch": 0.04749525460097384, + "grad_norm": 3.3366077636600555, + "learning_rate": 2.997603548713333e-06, + "loss": 0.5703, + "step": 1151 + }, + { + "epoch": 0.047536518940331764, + "grad_norm": 9.030951521747879, + "learning_rate": 2.997592207616977e-06, + "loss": 0.6056, + "step": 1152 + }, + { + "epoch": 0.047577783279689694, + "grad_norm": 4.3536477870322114, + "learning_rate": 2.997580839769878e-06, + "loss": 0.5588, + "step": 1153 + }, + { + "epoch": 0.047619047619047616, + "grad_norm": 4.1234329824361815, + "learning_rate": 2.9975694451722386e-06, + "loss": 
0.5943, + "step": 1154 + }, + { + "epoch": 0.047660311958405546, + "grad_norm": 3.706033091018318, + "learning_rate": 2.997558023824262e-06, + "loss": 0.5852, + "step": 1155 + }, + { + "epoch": 0.047701576297763476, + "grad_norm": 8.970478831411263, + "learning_rate": 2.9975465757261535e-06, + "loss": 0.5941, + "step": 1156 + }, + { + "epoch": 0.0477428406371214, + "grad_norm": 3.788593728439076, + "learning_rate": 2.9975351008781165e-06, + "loss": 0.6217, + "step": 1157 + }, + { + "epoch": 0.04778410497647933, + "grad_norm": 3.9917595871977993, + "learning_rate": 2.9975235992803563e-06, + "loss": 0.5719, + "step": 1158 + }, + { + "epoch": 0.04782536931583725, + "grad_norm": 3.271479816661568, + "learning_rate": 2.997512070933079e-06, + "loss": 0.635, + "step": 1159 + }, + { + "epoch": 0.04786663365519518, + "grad_norm": 11.332837066708644, + "learning_rate": 2.997500515836489e-06, + "loss": 0.6148, + "step": 1160 + }, + { + "epoch": 0.047907897994553104, + "grad_norm": 4.811082905927761, + "learning_rate": 2.997488933990794e-06, + "loss": 0.6046, + "step": 1161 + }, + { + "epoch": 0.047949162333911034, + "grad_norm": 6.5260241542176685, + "learning_rate": 2.9974773253962004e-06, + "loss": 0.6179, + "step": 1162 + }, + { + "epoch": 0.047990426673268964, + "grad_norm": 10.653086417380818, + "learning_rate": 2.997465690052916e-06, + "loss": 0.6125, + "step": 1163 + }, + { + "epoch": 0.04803169101262689, + "grad_norm": 3.1336847273210195, + "learning_rate": 2.997454027961148e-06, + "loss": 0.5688, + "step": 1164 + }, + { + "epoch": 0.04807295535198482, + "grad_norm": 31.728581741185902, + "learning_rate": 2.9974423391211056e-06, + "loss": 0.6026, + "step": 1165 + }, + { + "epoch": 0.04811421969134274, + "grad_norm": 2.1604685117386415, + "learning_rate": 2.997430623532996e-06, + "loss": 0.5825, + "step": 1166 + }, + { + "epoch": 0.04815548403070067, + "grad_norm": 4.5364824378799655, + "learning_rate": 2.99741888119703e-06, + "loss": 0.6601, + "step": 1167 + }, + { + "epoch": 0.04819674837005859, + "grad_norm": 5.9463527473136795, + "learning_rate": 2.9974071121134167e-06, + "loss": 0.6199, + "step": 1168 + }, + { + "epoch": 0.04823801270941652, + "grad_norm": 5.32394524932378, + "learning_rate": 2.9973953162823667e-06, + "loss": 0.5335, + "step": 1169 + }, + { + "epoch": 0.04827927704877445, + "grad_norm": 4.05074898547592, + "learning_rate": 2.9973834937040902e-06, + "loss": 0.569, + "step": 1170 + }, + { + "epoch": 0.048320541388132375, + "grad_norm": 4.324619200663388, + "learning_rate": 2.9973716443787986e-06, + "loss": 0.6138, + "step": 1171 + }, + { + "epoch": 0.048361805727490305, + "grad_norm": 4.981924376353572, + "learning_rate": 2.997359768306704e-06, + "loss": 0.5576, + "step": 1172 + }, + { + "epoch": 0.04840307006684823, + "grad_norm": 3.334684624179012, + "learning_rate": 2.9973478654880176e-06, + "loss": 0.5997, + "step": 1173 + }, + { + "epoch": 0.04844433440620616, + "grad_norm": 29.149137973998318, + "learning_rate": 2.9973359359229526e-06, + "loss": 0.6914, + "step": 1174 + }, + { + "epoch": 0.04848559874556408, + "grad_norm": 3.1595755411115602, + "learning_rate": 2.9973239796117225e-06, + "loss": 0.6374, + "step": 1175 + }, + { + "epoch": 0.04852686308492201, + "grad_norm": 3.5700485278630905, + "learning_rate": 2.9973119965545397e-06, + "loss": 0.6283, + "step": 1176 + }, + { + "epoch": 0.04856812742427994, + "grad_norm": 2.6178288746156926, + "learning_rate": 2.997299986751619e-06, + "loss": 0.6214, + "step": 1177 + }, + { + "epoch": 0.04860939176363786, + 
"grad_norm": 2.647015033957827, + "learning_rate": 2.9972879502031755e-06, + "loss": 0.626, + "step": 1178 + }, + { + "epoch": 0.04865065610299579, + "grad_norm": 2.548098180050589, + "learning_rate": 2.9972758869094233e-06, + "loss": 0.6115, + "step": 1179 + }, + { + "epoch": 0.048691920442353716, + "grad_norm": 5.601146766176205, + "learning_rate": 2.997263796870578e-06, + "loss": 0.6289, + "step": 1180 + }, + { + "epoch": 0.048733184781711646, + "grad_norm": 4.494452272602006, + "learning_rate": 2.9972516800868557e-06, + "loss": 0.6739, + "step": 1181 + }, + { + "epoch": 0.04877444912106957, + "grad_norm": 7.746282908501558, + "learning_rate": 2.9972395365584727e-06, + "loss": 0.5647, + "step": 1182 + }, + { + "epoch": 0.0488157134604275, + "grad_norm": 3.1168714874030985, + "learning_rate": 2.9972273662856464e-06, + "loss": 0.5649, + "step": 1183 + }, + { + "epoch": 0.04885697779978543, + "grad_norm": 8.885024391646745, + "learning_rate": 2.9972151692685935e-06, + "loss": 0.6223, + "step": 1184 + }, + { + "epoch": 0.04889824213914335, + "grad_norm": 4.143603402363153, + "learning_rate": 2.997202945507533e-06, + "loss": 0.6451, + "step": 1185 + }, + { + "epoch": 0.04893950647850128, + "grad_norm": 2.378295805464272, + "learning_rate": 2.997190695002682e-06, + "loss": 0.619, + "step": 1186 + }, + { + "epoch": 0.048980770817859204, + "grad_norm": 2.701405602210045, + "learning_rate": 2.9971784177542594e-06, + "loss": 0.5743, + "step": 1187 + }, + { + "epoch": 0.049022035157217134, + "grad_norm": 5.069012702914309, + "learning_rate": 2.997166113762485e-06, + "loss": 0.6866, + "step": 1188 + }, + { + "epoch": 0.04906329949657506, + "grad_norm": 3.9914924435658232, + "learning_rate": 2.9971537830275786e-06, + "loss": 0.6058, + "step": 1189 + }, + { + "epoch": 0.049104563835932986, + "grad_norm": 2.3156972068690336, + "learning_rate": 2.9971414255497607e-06, + "loss": 0.5934, + "step": 1190 + }, + { + "epoch": 0.049145828175290916, + "grad_norm": 8.983118081027195, + "learning_rate": 2.9971290413292513e-06, + "loss": 0.6459, + "step": 1191 + }, + { + "epoch": 0.04918709251464884, + "grad_norm": 3.1993523185630837, + "learning_rate": 2.997116630366272e-06, + "loss": 0.6118, + "step": 1192 + }, + { + "epoch": 0.04922835685400677, + "grad_norm": 2.764790466349803, + "learning_rate": 2.9971041926610444e-06, + "loss": 0.6553, + "step": 1193 + }, + { + "epoch": 0.04926962119336469, + "grad_norm": 3.4867241977134666, + "learning_rate": 2.9970917282137912e-06, + "loss": 0.6018, + "step": 1194 + }, + { + "epoch": 0.04931088553272262, + "grad_norm": 170.42394756546895, + "learning_rate": 2.997079237024734e-06, + "loss": 0.5987, + "step": 1195 + }, + { + "epoch": 0.049352149872080545, + "grad_norm": 4.887764683571777, + "learning_rate": 2.9970667190940966e-06, + "loss": 0.5869, + "step": 1196 + }, + { + "epoch": 0.049393414211438474, + "grad_norm": 3.665667626136361, + "learning_rate": 2.997054174422103e-06, + "loss": 0.5862, + "step": 1197 + }, + { + "epoch": 0.049434678550796404, + "grad_norm": 3.8568716856212713, + "learning_rate": 2.997041603008976e-06, + "loss": 0.6025, + "step": 1198 + }, + { + "epoch": 0.04947594289015433, + "grad_norm": 2.502131695771307, + "learning_rate": 2.9970290048549417e-06, + "loss": 0.6298, + "step": 1199 + }, + { + "epoch": 0.04951720722951226, + "grad_norm": 3.035135122899677, + "learning_rate": 2.9970163799602237e-06, + "loss": 0.5684, + "step": 1200 + }, + { + "epoch": 0.04955847156887018, + "grad_norm": 3.94909678973115, + "learning_rate": 2.9970037283250483e-06, + 
"loss": 0.5905, + "step": 1201 + }, + { + "epoch": 0.04959973590822811, + "grad_norm": 3.4120812212604057, + "learning_rate": 2.996991049949642e-06, + "loss": 0.5554, + "step": 1202 + }, + { + "epoch": 0.04964100024758604, + "grad_norm": 3.172051172094184, + "learning_rate": 2.99697834483423e-06, + "loss": 0.6147, + "step": 1203 + }, + { + "epoch": 0.04968226458694396, + "grad_norm": 2.6056224081830925, + "learning_rate": 2.9969656129790405e-06, + "loss": 0.5952, + "step": 1204 + }, + { + "epoch": 0.04972352892630189, + "grad_norm": 2.3969440372344746, + "learning_rate": 2.9969528543843e-06, + "loss": 0.6141, + "step": 1205 + }, + { + "epoch": 0.049764793265659815, + "grad_norm": 4.149613940224871, + "learning_rate": 2.9969400690502367e-06, + "loss": 0.6648, + "step": 1206 + }, + { + "epoch": 0.049806057605017745, + "grad_norm": 8.338162868343694, + "learning_rate": 2.996927256977079e-06, + "loss": 0.64, + "step": 1207 + }, + { + "epoch": 0.04984732194437567, + "grad_norm": 3.0293567504930965, + "learning_rate": 2.9969144181650554e-06, + "loss": 0.6564, + "step": 1208 + }, + { + "epoch": 0.0498885862837336, + "grad_norm": 2.448009095532907, + "learning_rate": 2.996901552614396e-06, + "loss": 0.5998, + "step": 1209 + }, + { + "epoch": 0.04992985062309153, + "grad_norm": 2.112466746296231, + "learning_rate": 2.9968886603253305e-06, + "loss": 0.5527, + "step": 1210 + }, + { + "epoch": 0.04997111496244945, + "grad_norm": 3.711631069219858, + "learning_rate": 2.9968757412980883e-06, + "loss": 0.5992, + "step": 1211 + }, + { + "epoch": 0.05001237930180738, + "grad_norm": 4.681374025629615, + "learning_rate": 2.996862795532901e-06, + "loss": 0.61, + "step": 1212 + }, + { + "epoch": 0.0500536436411653, + "grad_norm": 6.297749574512006, + "learning_rate": 2.9968498230299996e-06, + "loss": 0.6592, + "step": 1213 + }, + { + "epoch": 0.05009490798052323, + "grad_norm": 35.92558258206849, + "learning_rate": 2.996836823789616e-06, + "loss": 0.5761, + "step": 1214 + }, + { + "epoch": 0.050136172319881156, + "grad_norm": 5.139887280784008, + "learning_rate": 2.996823797811982e-06, + "loss": 0.5663, + "step": 1215 + }, + { + "epoch": 0.050177436659239086, + "grad_norm": 3.1524175036072095, + "learning_rate": 2.99681074509733e-06, + "loss": 0.6234, + "step": 1216 + }, + { + "epoch": 0.050218700998597016, + "grad_norm": 2.69642867513473, + "learning_rate": 2.9967976656458945e-06, + "loss": 0.626, + "step": 1217 + }, + { + "epoch": 0.05025996533795494, + "grad_norm": 3.584317138342996, + "learning_rate": 2.996784559457908e-06, + "loss": 0.6372, + "step": 1218 + }, + { + "epoch": 0.05030122967731287, + "grad_norm": 3.2374811636278804, + "learning_rate": 2.9967714265336047e-06, + "loss": 0.576, + "step": 1219 + }, + { + "epoch": 0.05034249401667079, + "grad_norm": 20.706952915066758, + "learning_rate": 2.9967582668732192e-06, + "loss": 0.5867, + "step": 1220 + }, + { + "epoch": 0.05038375835602872, + "grad_norm": 2.503299316116699, + "learning_rate": 2.9967450804769872e-06, + "loss": 0.5492, + "step": 1221 + }, + { + "epoch": 0.050425022695386644, + "grad_norm": 5.254511015412739, + "learning_rate": 2.9967318673451436e-06, + "loss": 0.6256, + "step": 1222 + }, + { + "epoch": 0.050466287034744574, + "grad_norm": 3.529837055673965, + "learning_rate": 2.9967186274779247e-06, + "loss": 0.6062, + "step": 1223 + }, + { + "epoch": 0.050507551374102504, + "grad_norm": 8.828498942608169, + "learning_rate": 2.9967053608755665e-06, + "loss": 0.6615, + "step": 1224 + }, + { + "epoch": 0.05054881571346043, + "grad_norm": 
21.028717877911628, + "learning_rate": 2.996692067538307e-06, + "loss": 0.5802, + "step": 1225 + }, + { + "epoch": 0.050590080052818356, + "grad_norm": 3.697308112590374, + "learning_rate": 2.9966787474663824e-06, + "loss": 0.6022, + "step": 1226 + }, + { + "epoch": 0.05063134439217628, + "grad_norm": 3.9483667824758903, + "learning_rate": 2.9966654006600313e-06, + "loss": 0.6114, + "step": 1227 + }, + { + "epoch": 0.05067260873153421, + "grad_norm": 7.458255179481411, + "learning_rate": 2.9966520271194924e-06, + "loss": 0.6188, + "step": 1228 + }, + { + "epoch": 0.05071387307089213, + "grad_norm": 4.13846248708528, + "learning_rate": 2.9966386268450043e-06, + "loss": 0.6797, + "step": 1229 + }, + { + "epoch": 0.05075513741025006, + "grad_norm": 4.224378590299508, + "learning_rate": 2.9966251998368066e-06, + "loss": 0.631, + "step": 1230 + }, + { + "epoch": 0.05079640174960799, + "grad_norm": 2.477105495077534, + "learning_rate": 2.996611746095138e-06, + "loss": 0.6171, + "step": 1231 + }, + { + "epoch": 0.050837666088965915, + "grad_norm": 3.5513847871553317, + "learning_rate": 2.99659826562024e-06, + "loss": 0.6783, + "step": 1232 + }, + { + "epoch": 0.050878930428323844, + "grad_norm": 5.904505816058581, + "learning_rate": 2.9965847584123532e-06, + "loss": 0.5847, + "step": 1233 + }, + { + "epoch": 0.05092019476768177, + "grad_norm": 6.085734338901139, + "learning_rate": 2.9965712244717186e-06, + "loss": 0.6152, + "step": 1234 + }, + { + "epoch": 0.0509614591070397, + "grad_norm": 2.873332290848578, + "learning_rate": 2.996557663798578e-06, + "loss": 0.5524, + "step": 1235 + }, + { + "epoch": 0.05100272344639762, + "grad_norm": 2.7143033580457874, + "learning_rate": 2.996544076393174e-06, + "loss": 0.5112, + "step": 1236 + }, + { + "epoch": 0.05104398778575555, + "grad_norm": 3.224431593221288, + "learning_rate": 2.996530462255749e-06, + "loss": 0.6128, + "step": 1237 + }, + { + "epoch": 0.05108525212511348, + "grad_norm": 3.7552815715487142, + "learning_rate": 2.996516821386546e-06, + "loss": 0.5851, + "step": 1238 + }, + { + "epoch": 0.0511265164644714, + "grad_norm": 5.312365600567033, + "learning_rate": 2.996503153785809e-06, + "loss": 0.6539, + "step": 1239 + }, + { + "epoch": 0.05116778080382933, + "grad_norm": 3.583992350387277, + "learning_rate": 2.996489459453782e-06, + "loss": 0.5901, + "step": 1240 + }, + { + "epoch": 0.051209045143187255, + "grad_norm": 3.1574750919762726, + "learning_rate": 2.9964757383907095e-06, + "loss": 0.5898, + "step": 1241 + }, + { + "epoch": 0.051250309482545185, + "grad_norm": 10.707842530564733, + "learning_rate": 2.9964619905968364e-06, + "loss": 0.5618, + "step": 1242 + }, + { + "epoch": 0.05129157382190311, + "grad_norm": 8.328728338451775, + "learning_rate": 2.9964482160724093e-06, + "loss": 0.5967, + "step": 1243 + }, + { + "epoch": 0.05133283816126104, + "grad_norm": 2.9287940311263383, + "learning_rate": 2.9964344148176726e-06, + "loss": 0.588, + "step": 1244 + }, + { + "epoch": 0.05137410250061897, + "grad_norm": 3.8935137782116933, + "learning_rate": 2.9964205868328745e-06, + "loss": 0.5761, + "step": 1245 + }, + { + "epoch": 0.05141536683997689, + "grad_norm": 3.020778685917, + "learning_rate": 2.996406732118261e-06, + "loss": 0.6102, + "step": 1246 + }, + { + "epoch": 0.05145663117933482, + "grad_norm": 3.9639277469152745, + "learning_rate": 2.9963928506740797e-06, + "loss": 0.6209, + "step": 1247 + }, + { + "epoch": 0.05149789551869274, + "grad_norm": 5.274205144579843, + "learning_rate": 2.996378942500579e-06, + "loss": 0.5845, + 
"step": 1248 + }, + { + "epoch": 0.05153915985805067, + "grad_norm": 4.096071021413338, + "learning_rate": 2.996365007598007e-06, + "loss": 0.6244, + "step": 1249 + }, + { + "epoch": 0.051580424197408596, + "grad_norm": 12.962712249763142, + "learning_rate": 2.9963510459666124e-06, + "loss": 0.5974, + "step": 1250 + }, + { + "epoch": 0.051621688536766526, + "grad_norm": 5.220338476312077, + "learning_rate": 2.996337057606645e-06, + "loss": 0.6228, + "step": 1251 + }, + { + "epoch": 0.051662952876124456, + "grad_norm": 3.9484105849365, + "learning_rate": 2.9963230425183545e-06, + "loss": 0.5818, + "step": 1252 + }, + { + "epoch": 0.05170421721548238, + "grad_norm": 3.638068094379712, + "learning_rate": 2.996309000701991e-06, + "loss": 0.6026, + "step": 1253 + }, + { + "epoch": 0.05174548155484031, + "grad_norm": 2.0519170676942275, + "learning_rate": 2.9962949321578055e-06, + "loss": 0.672, + "step": 1254 + }, + { + "epoch": 0.05178674589419823, + "grad_norm": 2.1832416313800964, + "learning_rate": 2.9962808368860494e-06, + "loss": 0.5263, + "step": 1255 + }, + { + "epoch": 0.05182801023355616, + "grad_norm": 3.0948348926390183, + "learning_rate": 2.9962667148869745e-06, + "loss": 0.5985, + "step": 1256 + }, + { + "epoch": 0.05186927457291409, + "grad_norm": 6.81150828350519, + "learning_rate": 2.9962525661608334e-06, + "loss": 0.5971, + "step": 1257 + }, + { + "epoch": 0.051910538912272014, + "grad_norm": 4.175653614754481, + "learning_rate": 2.996238390707878e-06, + "loss": 0.5705, + "step": 1258 + }, + { + "epoch": 0.051951803251629944, + "grad_norm": 9.44492366876916, + "learning_rate": 2.996224188528362e-06, + "loss": 0.6188, + "step": 1259 + }, + { + "epoch": 0.05199306759098787, + "grad_norm": 2.622739441915276, + "learning_rate": 2.996209959622539e-06, + "loss": 0.5826, + "step": 1260 + }, + { + "epoch": 0.0520343319303458, + "grad_norm": 11.287964070042587, + "learning_rate": 2.9961957039906635e-06, + "loss": 0.6326, + "step": 1261 + }, + { + "epoch": 0.05207559626970372, + "grad_norm": 20.53677171052137, + "learning_rate": 2.9961814216329895e-06, + "loss": 0.5966, + "step": 1262 + }, + { + "epoch": 0.05211686060906165, + "grad_norm": 5.981782189597958, + "learning_rate": 2.996167112549772e-06, + "loss": 0.625, + "step": 1263 + }, + { + "epoch": 0.05215812494841958, + "grad_norm": 2.997914141749258, + "learning_rate": 2.9961527767412678e-06, + "loss": 0.618, + "step": 1264 + }, + { + "epoch": 0.0521993892877775, + "grad_norm": 2.744468578401712, + "learning_rate": 2.996138414207732e-06, + "loss": 0.6428, + "step": 1265 + }, + { + "epoch": 0.05224065362713543, + "grad_norm": 3.287219125520101, + "learning_rate": 2.996124024949421e-06, + "loss": 0.564, + "step": 1266 + }, + { + "epoch": 0.052281917966493355, + "grad_norm": 8.007273670385715, + "learning_rate": 2.9961096089665926e-06, + "loss": 0.5797, + "step": 1267 + }, + { + "epoch": 0.052323182305851285, + "grad_norm": 3.065881175526866, + "learning_rate": 2.996095166259504e-06, + "loss": 0.5611, + "step": 1268 + }, + { + "epoch": 0.05236444664520921, + "grad_norm": 9.511741891819703, + "learning_rate": 2.9960806968284125e-06, + "loss": 0.5419, + "step": 1269 + }, + { + "epoch": 0.05240571098456714, + "grad_norm": 4.251768474576251, + "learning_rate": 2.9960662006735773e-06, + "loss": 0.544, + "step": 1270 + }, + { + "epoch": 0.05244697532392507, + "grad_norm": 14.357525094641112, + "learning_rate": 2.9960516777952576e-06, + "loss": 0.631, + "step": 1271 + }, + { + "epoch": 0.05248823966328299, + "grad_norm": 2.864749772991877, + 
"learning_rate": 2.996037128193712e-06, + "loss": 0.6462, + "step": 1272 + }, + { + "epoch": 0.05252950400264092, + "grad_norm": 4.005923738162471, + "learning_rate": 2.9960225518692006e-06, + "loss": 0.6168, + "step": 1273 + }, + { + "epoch": 0.05257076834199884, + "grad_norm": 3.195403701321238, + "learning_rate": 2.9960079488219843e-06, + "loss": 0.5681, + "step": 1274 + }, + { + "epoch": 0.05261203268135677, + "grad_norm": 2.231448922288402, + "learning_rate": 2.9959933190523236e-06, + "loss": 0.6027, + "step": 1275 + }, + { + "epoch": 0.052653297020714696, + "grad_norm": 5.1868398054161675, + "learning_rate": 2.99597866256048e-06, + "loss": 0.6763, + "step": 1276 + }, + { + "epoch": 0.052694561360072625, + "grad_norm": 6.107770650648699, + "learning_rate": 2.9959639793467144e-06, + "loss": 0.6443, + "step": 1277 + }, + { + "epoch": 0.052735825699430555, + "grad_norm": 4.596789924700683, + "learning_rate": 2.9959492694112907e-06, + "loss": 0.5499, + "step": 1278 + }, + { + "epoch": 0.05277709003878848, + "grad_norm": 3.006833414736596, + "learning_rate": 2.99593453275447e-06, + "loss": 0.6784, + "step": 1279 + }, + { + "epoch": 0.05281835437814641, + "grad_norm": 2.7379614499098097, + "learning_rate": 2.9959197693765167e-06, + "loss": 0.6402, + "step": 1280 + }, + { + "epoch": 0.05285961871750433, + "grad_norm": 11.291592507886744, + "learning_rate": 2.995904979277694e-06, + "loss": 0.6822, + "step": 1281 + }, + { + "epoch": 0.05290088305686226, + "grad_norm": 2.651415891139626, + "learning_rate": 2.995890162458266e-06, + "loss": 0.5001, + "step": 1282 + }, + { + "epoch": 0.052942147396220184, + "grad_norm": 2.1902398636157483, + "learning_rate": 2.995875318918498e-06, + "loss": 0.6115, + "step": 1283 + }, + { + "epoch": 0.05298341173557811, + "grad_norm": 8.270040509969867, + "learning_rate": 2.9958604486586546e-06, + "loss": 0.5868, + "step": 1284 + }, + { + "epoch": 0.05302467607493604, + "grad_norm": 3.345143511536171, + "learning_rate": 2.9958455516790014e-06, + "loss": 0.5672, + "step": 1285 + }, + { + "epoch": 0.053065940414293966, + "grad_norm": 3.0122730755183524, + "learning_rate": 2.995830627979804e-06, + "loss": 0.6082, + "step": 1286 + }, + { + "epoch": 0.053107204753651896, + "grad_norm": 2.210394238609837, + "learning_rate": 2.99581567756133e-06, + "loss": 0.5926, + "step": 1287 + }, + { + "epoch": 0.05314846909300982, + "grad_norm": 2.1461186810631045, + "learning_rate": 2.995800700423847e-06, + "loss": 0.581, + "step": 1288 + }, + { + "epoch": 0.05318973343236775, + "grad_norm": 41.79400743851607, + "learning_rate": 2.9957856965676205e-06, + "loss": 0.5633, + "step": 1289 + }, + { + "epoch": 0.05323099777172567, + "grad_norm": 4.381581815675684, + "learning_rate": 2.99577066599292e-06, + "loss": 0.5749, + "step": 1290 + }, + { + "epoch": 0.0532722621110836, + "grad_norm": 2.782614745573032, + "learning_rate": 2.995755608700013e-06, + "loss": 0.6048, + "step": 1291 + }, + { + "epoch": 0.05331352645044153, + "grad_norm": 8.044230920343296, + "learning_rate": 2.9957405246891697e-06, + "loss": 0.6064, + "step": 1292 + }, + { + "epoch": 0.053354790789799454, + "grad_norm": 3.563667968547292, + "learning_rate": 2.9957254139606583e-06, + "loss": 0.6017, + "step": 1293 + }, + { + "epoch": 0.053396055129157384, + "grad_norm": 2.88377421982881, + "learning_rate": 2.9957102765147495e-06, + "loss": 0.6047, + "step": 1294 + }, + { + "epoch": 0.05343731946851531, + "grad_norm": 3.0884585756197205, + "learning_rate": 2.995695112351714e-06, + "loss": 0.6018, + "step": 1295 + }, + { + 
"epoch": 0.05347858380787324, + "grad_norm": 3.346198726727616, + "learning_rate": 2.9956799214718214e-06, + "loss": 0.5921, + "step": 1296 + }, + { + "epoch": 0.05351984814723116, + "grad_norm": 3.8764676639279774, + "learning_rate": 2.995664703875344e-06, + "loss": 0.6031, + "step": 1297 + }, + { + "epoch": 0.05356111248658909, + "grad_norm": 3.040281173375358, + "learning_rate": 2.9956494595625532e-06, + "loss": 0.6639, + "step": 1298 + }, + { + "epoch": 0.05360237682594702, + "grad_norm": 8.949383750865927, + "learning_rate": 2.995634188533722e-06, + "loss": 0.6526, + "step": 1299 + }, + { + "epoch": 0.05364364116530494, + "grad_norm": 4.458740147013527, + "learning_rate": 2.995618890789123e-06, + "loss": 0.5594, + "step": 1300 + }, + { + "epoch": 0.05368490550466287, + "grad_norm": 5.896506067200626, + "learning_rate": 2.9956035663290284e-06, + "loss": 0.6659, + "step": 1301 + }, + { + "epoch": 0.053726169844020795, + "grad_norm": 5.410060157750313, + "learning_rate": 2.9955882151537126e-06, + "loss": 0.6812, + "step": 1302 + }, + { + "epoch": 0.053767434183378725, + "grad_norm": 3.7762211896672824, + "learning_rate": 2.9955728372634507e-06, + "loss": 0.5824, + "step": 1303 + }, + { + "epoch": 0.05380869852273665, + "grad_norm": 3.968346098284296, + "learning_rate": 2.9955574326585155e-06, + "loss": 0.6758, + "step": 1304 + }, + { + "epoch": 0.05384996286209458, + "grad_norm": 2.402739876571965, + "learning_rate": 2.995542001339184e-06, + "loss": 0.5674, + "step": 1305 + }, + { + "epoch": 0.05389122720145251, + "grad_norm": 2.8520505297130128, + "learning_rate": 2.995526543305731e-06, + "loss": 0.6277, + "step": 1306 + }, + { + "epoch": 0.05393249154081043, + "grad_norm": 3.1613729011307, + "learning_rate": 2.9955110585584323e-06, + "loss": 0.577, + "step": 1307 + }, + { + "epoch": 0.05397375588016836, + "grad_norm": 3.535308947117083, + "learning_rate": 2.995495547097565e-06, + "loss": 0.6021, + "step": 1308 + }, + { + "epoch": 0.05401502021952628, + "grad_norm": 3.0290290635447588, + "learning_rate": 2.9954800089234064e-06, + "loss": 0.5781, + "step": 1309 + }, + { + "epoch": 0.05405628455888421, + "grad_norm": 4.690900882206513, + "learning_rate": 2.995464444036234e-06, + "loss": 0.5725, + "step": 1310 + }, + { + "epoch": 0.054097548898242136, + "grad_norm": 3.4384625547823235, + "learning_rate": 2.995448852436325e-06, + "loss": 0.6342, + "step": 1311 + }, + { + "epoch": 0.054138813237600066, + "grad_norm": 9.185673533153546, + "learning_rate": 2.9954332341239584e-06, + "loss": 0.6276, + "step": 1312 + }, + { + "epoch": 0.054180077576957995, + "grad_norm": 3.3265935937353706, + "learning_rate": 2.9954175890994136e-06, + "loss": 0.5635, + "step": 1313 + }, + { + "epoch": 0.05422134191631592, + "grad_norm": 3.2845647988336957, + "learning_rate": 2.9954019173629695e-06, + "loss": 0.5607, + "step": 1314 + }, + { + "epoch": 0.05426260625567385, + "grad_norm": 5.212584670190252, + "learning_rate": 2.995386218914906e-06, + "loss": 0.6047, + "step": 1315 + }, + { + "epoch": 0.05430387059503177, + "grad_norm": 3.0397677640288174, + "learning_rate": 2.995370493755504e-06, + "loss": 0.624, + "step": 1316 + }, + { + "epoch": 0.0543451349343897, + "grad_norm": 5.45023150841103, + "learning_rate": 2.995354741885044e-06, + "loss": 0.6381, + "step": 1317 + }, + { + "epoch": 0.05438639927374763, + "grad_norm": 3.799554400651511, + "learning_rate": 2.9953389633038078e-06, + "loss": 0.5697, + "step": 1318 + }, + { + "epoch": 0.054427663613105554, + "grad_norm": 5.278160114778612, + "learning_rate": 
2.995323158012077e-06, + "loss": 0.5775, + "step": 1319 + }, + { + "epoch": 0.05446892795246348, + "grad_norm": 3.0917751780822593, + "learning_rate": 2.9953073260101335e-06, + "loss": 0.5479, + "step": 1320 + }, + { + "epoch": 0.054510192291821406, + "grad_norm": 2.422117127337801, + "learning_rate": 2.9952914672982606e-06, + "loss": 0.564, + "step": 1321 + }, + { + "epoch": 0.054551456631179336, + "grad_norm": 4.548462886012851, + "learning_rate": 2.9952755818767415e-06, + "loss": 0.6178, + "step": 1322 + }, + { + "epoch": 0.05459272097053726, + "grad_norm": 2.842251131582998, + "learning_rate": 2.99525966974586e-06, + "loss": 0.5669, + "step": 1323 + }, + { + "epoch": 0.05463398530989519, + "grad_norm": 3.031230450712514, + "learning_rate": 2.9952437309059e-06, + "loss": 0.5536, + "step": 1324 + }, + { + "epoch": 0.05467524964925312, + "grad_norm": 2.9932555873465088, + "learning_rate": 2.9952277653571467e-06, + "loss": 0.6345, + "step": 1325 + }, + { + "epoch": 0.05471651398861104, + "grad_norm": 3.9700393299542553, + "learning_rate": 2.995211773099885e-06, + "loss": 0.5731, + "step": 1326 + }, + { + "epoch": 0.05475777832796897, + "grad_norm": 3.9721007051945527, + "learning_rate": 2.9951957541344e-06, + "loss": 0.6288, + "step": 1327 + }, + { + "epoch": 0.054799042667326894, + "grad_norm": 3.471416957991604, + "learning_rate": 2.995179708460979e-06, + "loss": 0.6484, + "step": 1328 + }, + { + "epoch": 0.054840307006684824, + "grad_norm": 6.980266493301378, + "learning_rate": 2.995163636079908e-06, + "loss": 0.5827, + "step": 1329 + }, + { + "epoch": 0.05488157134604275, + "grad_norm": 2.637812321202787, + "learning_rate": 2.995147536991474e-06, + "loss": 0.6046, + "step": 1330 + }, + { + "epoch": 0.05492283568540068, + "grad_norm": 2.861349576382318, + "learning_rate": 2.995131411195965e-06, + "loss": 0.6247, + "step": 1331 + }, + { + "epoch": 0.05496410002475861, + "grad_norm": 6.485600159189052, + "learning_rate": 2.9951152586936687e-06, + "loss": 0.5808, + "step": 1332 + }, + { + "epoch": 0.05500536436411653, + "grad_norm": 3.0610046741652104, + "learning_rate": 2.995099079484873e-06, + "loss": 0.5835, + "step": 1333 + }, + { + "epoch": 0.05504662870347446, + "grad_norm": 2.4320132082272257, + "learning_rate": 2.995082873569868e-06, + "loss": 0.5523, + "step": 1334 + }, + { + "epoch": 0.05508789304283238, + "grad_norm": 3.9030900371532726, + "learning_rate": 2.9950666409489433e-06, + "loss": 0.6165, + "step": 1335 + }, + { + "epoch": 0.05512915738219031, + "grad_norm": 3.1905437396017913, + "learning_rate": 2.9950503816223873e-06, + "loss": 0.5938, + "step": 1336 + }, + { + "epoch": 0.055170421721548235, + "grad_norm": 4.256191600842376, + "learning_rate": 2.9950340955904923e-06, + "loss": 0.6158, + "step": 1337 + }, + { + "epoch": 0.055211686060906165, + "grad_norm": 3.867973194547231, + "learning_rate": 2.995017782853548e-06, + "loss": 0.5781, + "step": 1338 + }, + { + "epoch": 0.055252950400264095, + "grad_norm": 4.810851613891745, + "learning_rate": 2.995001443411846e-06, + "loss": 0.5511, + "step": 1339 + }, + { + "epoch": 0.05529421473962202, + "grad_norm": 4.000770294201084, + "learning_rate": 2.9949850772656783e-06, + "loss": 0.5618, + "step": 1340 + }, + { + "epoch": 0.05533547907897995, + "grad_norm": 4.784631223346562, + "learning_rate": 2.9949686844153376e-06, + "loss": 0.629, + "step": 1341 + }, + { + "epoch": 0.05537674341833787, + "grad_norm": 3.178217441625805, + "learning_rate": 2.9949522648611163e-06, + "loss": 0.6327, + "step": 1342 + }, + { + "epoch": 
0.0554180077576958, + "grad_norm": 3.2759807126100586, + "learning_rate": 2.9949358186033073e-06, + "loss": 0.5495, + "step": 1343 + }, + { + "epoch": 0.05545927209705372, + "grad_norm": 3.062957413263087, + "learning_rate": 2.9949193456422056e-06, + "loss": 0.5653, + "step": 1344 + }, + { + "epoch": 0.05550053643641165, + "grad_norm": 3.2701425260609156, + "learning_rate": 2.9949028459781036e-06, + "loss": 0.6289, + "step": 1345 + }, + { + "epoch": 0.05554180077576958, + "grad_norm": 2.327564499412974, + "learning_rate": 2.994886319611298e-06, + "loss": 0.6294, + "step": 1346 + }, + { + "epoch": 0.055583065115127506, + "grad_norm": 6.594365448567018, + "learning_rate": 2.9948697665420826e-06, + "loss": 0.5923, + "step": 1347 + }, + { + "epoch": 0.055624329454485436, + "grad_norm": 5.273703257462421, + "learning_rate": 2.994853186770754e-06, + "loss": 0.6169, + "step": 1348 + }, + { + "epoch": 0.05566559379384336, + "grad_norm": 4.477100525499271, + "learning_rate": 2.994836580297608e-06, + "loss": 0.5666, + "step": 1349 + }, + { + "epoch": 0.05570685813320129, + "grad_norm": 2.543958348684202, + "learning_rate": 2.9948199471229406e-06, + "loss": 0.5824, + "step": 1350 + }, + { + "epoch": 0.05574812247255921, + "grad_norm": 3.1228491115456585, + "learning_rate": 2.9948032872470497e-06, + "loss": 0.5766, + "step": 1351 + }, + { + "epoch": 0.05578938681191714, + "grad_norm": 2.0142536637598782, + "learning_rate": 2.9947866006702324e-06, + "loss": 0.5742, + "step": 1352 + }, + { + "epoch": 0.05583065115127507, + "grad_norm": 4.750338354517526, + "learning_rate": 2.9947698873927876e-06, + "loss": 0.5922, + "step": 1353 + }, + { + "epoch": 0.055871915490632994, + "grad_norm": 4.1636193885225765, + "learning_rate": 2.9947531474150128e-06, + "loss": 0.6476, + "step": 1354 + }, + { + "epoch": 0.055913179829990924, + "grad_norm": 7.4461918121383235, + "learning_rate": 2.994736380737208e-06, + "loss": 0.5455, + "step": 1355 + }, + { + "epoch": 0.055954444169348846, + "grad_norm": 3.825810763505465, + "learning_rate": 2.9947195873596717e-06, + "loss": 0.6128, + "step": 1356 + }, + { + "epoch": 0.055995708508706776, + "grad_norm": 3.1083421563503815, + "learning_rate": 2.9947027672827047e-06, + "loss": 0.6277, + "step": 1357 + }, + { + "epoch": 0.0560369728480647, + "grad_norm": 3.6277509961294885, + "learning_rate": 2.9946859205066067e-06, + "loss": 0.592, + "step": 1358 + }, + { + "epoch": 0.05607823718742263, + "grad_norm": 9.02801193076852, + "learning_rate": 2.9946690470316794e-06, + "loss": 0.6707, + "step": 1359 + }, + { + "epoch": 0.05611950152678056, + "grad_norm": 2.9875256801772894, + "learning_rate": 2.9946521468582237e-06, + "loss": 0.6025, + "step": 1360 + }, + { + "epoch": 0.05616076586613848, + "grad_norm": 3.7948340309811597, + "learning_rate": 2.9946352199865414e-06, + "loss": 0.6581, + "step": 1361 + }, + { + "epoch": 0.05620203020549641, + "grad_norm": 8.408149990953723, + "learning_rate": 2.9946182664169355e-06, + "loss": 0.6049, + "step": 1362 + }, + { + "epoch": 0.056243294544854334, + "grad_norm": 5.037918111998162, + "learning_rate": 2.994601286149708e-06, + "loss": 0.6185, + "step": 1363 + }, + { + "epoch": 0.056284558884212264, + "grad_norm": 3.243551603876824, + "learning_rate": 2.9945842791851626e-06, + "loss": 0.596, + "step": 1364 + }, + { + "epoch": 0.05632582322357019, + "grad_norm": 4.4741161015556745, + "learning_rate": 2.9945672455236033e-06, + "loss": 0.6333, + "step": 1365 + }, + { + "epoch": 0.05636708756292812, + "grad_norm": 4.872886377852894, + 
"learning_rate": 2.9945501851653346e-06, + "loss": 0.5647, + "step": 1366 + }, + { + "epoch": 0.05640835190228605, + "grad_norm": 24.307543579372926, + "learning_rate": 2.99453309811066e-06, + "loss": 0.5717, + "step": 1367 + }, + { + "epoch": 0.05644961624164397, + "grad_norm": 5.377085602355166, + "learning_rate": 2.994515984359886e-06, + "loss": 0.5321, + "step": 1368 + }, + { + "epoch": 0.0564908805810019, + "grad_norm": 4.104886843285927, + "learning_rate": 2.9944988439133175e-06, + "loss": 0.5565, + "step": 1369 + }, + { + "epoch": 0.05653214492035982, + "grad_norm": 7.904833704508448, + "learning_rate": 2.9944816767712615e-06, + "loss": 0.7083, + "step": 1370 + }, + { + "epoch": 0.05657340925971775, + "grad_norm": 3.3214112589690696, + "learning_rate": 2.9944644829340236e-06, + "loss": 0.6376, + "step": 1371 + }, + { + "epoch": 0.05661467359907568, + "grad_norm": 2.2333733873914197, + "learning_rate": 2.994447262401912e-06, + "loss": 0.6324, + "step": 1372 + }, + { + "epoch": 0.056655937938433605, + "grad_norm": 15.38031798285019, + "learning_rate": 2.994430015175233e-06, + "loss": 0.5957, + "step": 1373 + }, + { + "epoch": 0.056697202277791535, + "grad_norm": 10.585778814210192, + "learning_rate": 2.994412741254296e-06, + "loss": 0.5803, + "step": 1374 + }, + { + "epoch": 0.05673846661714946, + "grad_norm": 6.71263235596266, + "learning_rate": 2.994395440639409e-06, + "loss": 0.5959, + "step": 1375 + }, + { + "epoch": 0.05677973095650739, + "grad_norm": 3.228004537643923, + "learning_rate": 2.9943781133308805e-06, + "loss": 0.578, + "step": 1376 + }, + { + "epoch": 0.05682099529586531, + "grad_norm": 2.7313394461048435, + "learning_rate": 2.994360759329021e-06, + "loss": 0.5885, + "step": 1377 + }, + { + "epoch": 0.05686225963522324, + "grad_norm": 2.806144360047511, + "learning_rate": 2.99434337863414e-06, + "loss": 0.5611, + "step": 1378 + }, + { + "epoch": 0.05690352397458117, + "grad_norm": 3.731274895056601, + "learning_rate": 2.994325971246548e-06, + "loss": 0.5871, + "step": 1379 + }, + { + "epoch": 0.05694478831393909, + "grad_norm": 3.3361479455213128, + "learning_rate": 2.9943085371665557e-06, + "loss": 0.5757, + "step": 1380 + }, + { + "epoch": 0.05698605265329702, + "grad_norm": 3.840231888652367, + "learning_rate": 2.9942910763944748e-06, + "loss": 0.5805, + "step": 1381 + }, + { + "epoch": 0.057027316992654946, + "grad_norm": 3.9130886613505664, + "learning_rate": 2.994273588930617e-06, + "loss": 0.5823, + "step": 1382 + }, + { + "epoch": 0.057068581332012876, + "grad_norm": 4.375207890864934, + "learning_rate": 2.994256074775295e-06, + "loss": 0.5696, + "step": 1383 + }, + { + "epoch": 0.0571098456713708, + "grad_norm": 3.058430078245664, + "learning_rate": 2.994238533928822e-06, + "loss": 0.6284, + "step": 1384 + }, + { + "epoch": 0.05715111001072873, + "grad_norm": 11.610629091945231, + "learning_rate": 2.99422096639151e-06, + "loss": 0.6016, + "step": 1385 + }, + { + "epoch": 0.05719237435008666, + "grad_norm": 5.1121899578428724, + "learning_rate": 2.994203372163674e-06, + "loss": 0.6052, + "step": 1386 + }, + { + "epoch": 0.05723363868944458, + "grad_norm": 3.148635721421823, + "learning_rate": 2.9941857512456274e-06, + "loss": 0.5481, + "step": 1387 + }, + { + "epoch": 0.05727490302880251, + "grad_norm": 5.654988581427127, + "learning_rate": 2.9941681036376854e-06, + "loss": 0.6242, + "step": 1388 + }, + { + "epoch": 0.057316167368160434, + "grad_norm": 3.5068188427985483, + "learning_rate": 2.994150429340164e-06, + "loss": 0.6116, + "step": 1389 + }, + { + 
"epoch": 0.057357431707518364, + "grad_norm": 3.245519840096283, + "learning_rate": 2.9941327283533775e-06, + "loss": 0.5664, + "step": 1390 + }, + { + "epoch": 0.05739869604687629, + "grad_norm": 6.430037294750573, + "learning_rate": 2.994115000677643e-06, + "loss": 0.5886, + "step": 1391 + }, + { + "epoch": 0.057439960386234216, + "grad_norm": 2.4686501412802855, + "learning_rate": 2.9940972463132762e-06, + "loss": 0.6085, + "step": 1392 + }, + { + "epoch": 0.057481224725592146, + "grad_norm": 10.499342488949958, + "learning_rate": 2.9940794652605954e-06, + "loss": 0.6125, + "step": 1393 + }, + { + "epoch": 0.05752248906495007, + "grad_norm": 2.818079485971711, + "learning_rate": 2.9940616575199177e-06, + "loss": 0.5559, + "step": 1394 + }, + { + "epoch": 0.057563753404308, + "grad_norm": 5.777882018405451, + "learning_rate": 2.9940438230915616e-06, + "loss": 0.6178, + "step": 1395 + }, + { + "epoch": 0.05760501774366592, + "grad_norm": 2.2746684387211586, + "learning_rate": 2.9940259619758447e-06, + "loss": 0.5585, + "step": 1396 + }, + { + "epoch": 0.05764628208302385, + "grad_norm": 5.051223650167324, + "learning_rate": 2.9940080741730873e-06, + "loss": 0.6333, + "step": 1397 + }, + { + "epoch": 0.057687546422381775, + "grad_norm": 2.8363112797562944, + "learning_rate": 2.9939901596836077e-06, + "loss": 0.5885, + "step": 1398 + }, + { + "epoch": 0.057728810761739704, + "grad_norm": 7.939717954447479, + "learning_rate": 2.9939722185077267e-06, + "loss": 0.5994, + "step": 1399 + }, + { + "epoch": 0.057770075101097634, + "grad_norm": 2.7084240244486906, + "learning_rate": 2.9939542506457643e-06, + "loss": 0.6314, + "step": 1400 + }, + { + "epoch": 0.05781133944045556, + "grad_norm": 8.22582162083056, + "learning_rate": 2.993936256098042e-06, + "loss": 0.6249, + "step": 1401 + }, + { + "epoch": 0.05785260377981349, + "grad_norm": 4.821452100363905, + "learning_rate": 2.993918234864881e-06, + "loss": 0.599, + "step": 1402 + }, + { + "epoch": 0.05789386811917141, + "grad_norm": 2.722816762861776, + "learning_rate": 2.993900186946603e-06, + "loss": 0.5825, + "step": 1403 + }, + { + "epoch": 0.05793513245852934, + "grad_norm": 6.0818660046173685, + "learning_rate": 2.99388211234353e-06, + "loss": 0.6269, + "step": 1404 + }, + { + "epoch": 0.05797639679788726, + "grad_norm": 4.335150652862956, + "learning_rate": 2.9938640110559857e-06, + "loss": 0.5587, + "step": 1405 + }, + { + "epoch": 0.05801766113724519, + "grad_norm": 3.2449197325131474, + "learning_rate": 2.9938458830842935e-06, + "loss": 0.5556, + "step": 1406 + }, + { + "epoch": 0.05805892547660312, + "grad_norm": 3.570089565885419, + "learning_rate": 2.9938277284287764e-06, + "loss": 0.6052, + "step": 1407 + }, + { + "epoch": 0.058100189815961045, + "grad_norm": 3.1770116509570223, + "learning_rate": 2.993809547089759e-06, + "loss": 0.5842, + "step": 1408 + }, + { + "epoch": 0.058141454155318975, + "grad_norm": 5.931017249073114, + "learning_rate": 2.993791339067566e-06, + "loss": 0.6093, + "step": 1409 + }, + { + "epoch": 0.0581827184946769, + "grad_norm": 6.470768474312683, + "learning_rate": 2.993773104362523e-06, + "loss": 0.5745, + "step": 1410 + }, + { + "epoch": 0.05822398283403483, + "grad_norm": 3.538857442024, + "learning_rate": 2.993754842974956e-06, + "loss": 0.5836, + "step": 1411 + }, + { + "epoch": 0.05826524717339275, + "grad_norm": 2.604381155564453, + "learning_rate": 2.9937365549051903e-06, + "loss": 0.5425, + "step": 1412 + }, + { + "epoch": 0.05830651151275068, + "grad_norm": 3.7846426061159804, + "learning_rate": 
2.9937182401535526e-06, + "loss": 0.5652, + "step": 1413 + }, + { + "epoch": 0.05834777585210861, + "grad_norm": 2.6750553023309664, + "learning_rate": 2.9936998987203707e-06, + "loss": 0.6185, + "step": 1414 + }, + { + "epoch": 0.05838904019146653, + "grad_norm": 4.004839211880988, + "learning_rate": 2.993681530605972e-06, + "loss": 0.6129, + "step": 1415 + }, + { + "epoch": 0.05843030453082446, + "grad_norm": 5.95463118502931, + "learning_rate": 2.993663135810685e-06, + "loss": 0.594, + "step": 1416 + }, + { + "epoch": 0.058471568870182386, + "grad_norm": 3.509227754618014, + "learning_rate": 2.993644714334837e-06, + "loss": 0.6082, + "step": 1417 + }, + { + "epoch": 0.058512833209540316, + "grad_norm": 2.4463188527849447, + "learning_rate": 2.993626266178758e-06, + "loss": 0.5244, + "step": 1418 + }, + { + "epoch": 0.05855409754889824, + "grad_norm": 2.7029172653946345, + "learning_rate": 2.993607791342778e-06, + "loss": 0.5806, + "step": 1419 + }, + { + "epoch": 0.05859536188825617, + "grad_norm": 2.56162993098148, + "learning_rate": 2.993589289827226e-06, + "loss": 0.6076, + "step": 1420 + }, + { + "epoch": 0.0586366262276141, + "grad_norm": 11.037911962631947, + "learning_rate": 2.993570761632433e-06, + "loss": 0.5292, + "step": 1421 + }, + { + "epoch": 0.05867789056697202, + "grad_norm": 4.2772976529527815, + "learning_rate": 2.99355220675873e-06, + "loss": 0.6263, + "step": 1422 + }, + { + "epoch": 0.05871915490632995, + "grad_norm": 1.978309000264262, + "learning_rate": 2.993533625206448e-06, + "loss": 0.6023, + "step": 1423 + }, + { + "epoch": 0.058760419245687874, + "grad_norm": 6.326579007682357, + "learning_rate": 2.9935150169759192e-06, + "loss": 0.6192, + "step": 1424 + }, + { + "epoch": 0.058801683585045804, + "grad_norm": 6.297852475265251, + "learning_rate": 2.993496382067476e-06, + "loss": 0.5505, + "step": 1425 + }, + { + "epoch": 0.058842947924403734, + "grad_norm": 2.876835438351902, + "learning_rate": 2.9934777204814513e-06, + "loss": 0.5884, + "step": 1426 + }, + { + "epoch": 0.05888421226376166, + "grad_norm": 5.074991469444648, + "learning_rate": 2.9934590322181786e-06, + "loss": 0.5822, + "step": 1427 + }, + { + "epoch": 0.058925476603119586, + "grad_norm": 4.4630354958196765, + "learning_rate": 2.9934403172779916e-06, + "loss": 0.5593, + "step": 1428 + }, + { + "epoch": 0.05896674094247751, + "grad_norm": 3.807555469453177, + "learning_rate": 2.993421575661224e-06, + "loss": 0.5697, + "step": 1429 + }, + { + "epoch": 0.05900800528183544, + "grad_norm": 3.8758158680763297, + "learning_rate": 2.9934028073682113e-06, + "loss": 0.5875, + "step": 1430 + }, + { + "epoch": 0.05904926962119336, + "grad_norm": 5.86000426879774, + "learning_rate": 2.993384012399289e-06, + "loss": 0.6037, + "step": 1431 + }, + { + "epoch": 0.05909053396055129, + "grad_norm": 10.86713487629747, + "learning_rate": 2.993365190754792e-06, + "loss": 0.5941, + "step": 1432 + }, + { + "epoch": 0.05913179829990922, + "grad_norm": 2.5680242815515495, + "learning_rate": 2.993346342435057e-06, + "loss": 0.5615, + "step": 1433 + }, + { + "epoch": 0.059173062639267145, + "grad_norm": 4.89631148147127, + "learning_rate": 2.9933274674404203e-06, + "loss": 0.5818, + "step": 1434 + }, + { + "epoch": 0.059214326978625074, + "grad_norm": 3.1962505719009253, + "learning_rate": 2.993308565771219e-06, + "loss": 0.6414, + "step": 1435 + }, + { + "epoch": 0.059255591317983, + "grad_norm": 2.583747036305028, + "learning_rate": 2.9932896374277917e-06, + "loss": 0.6053, + "step": 1436 + }, + { + "epoch": 
0.05929685565734093, + "grad_norm": 13.634650348087883, + "learning_rate": 2.9932706824104755e-06, + "loss": 0.6169, + "step": 1437 + }, + { + "epoch": 0.05933811999669885, + "grad_norm": 3.1530711827589806, + "learning_rate": 2.9932517007196095e-06, + "loss": 0.6011, + "step": 1438 + }, + { + "epoch": 0.05937938433605678, + "grad_norm": 100.38776569748293, + "learning_rate": 2.9932326923555323e-06, + "loss": 0.5622, + "step": 1439 + }, + { + "epoch": 0.05942064867541471, + "grad_norm": 3.4225542535287214, + "learning_rate": 2.993213657318584e-06, + "loss": 0.6052, + "step": 1440 + }, + { + "epoch": 0.05946191301477263, + "grad_norm": 3.2020462399469003, + "learning_rate": 2.993194595609104e-06, + "loss": 0.6594, + "step": 1441 + }, + { + "epoch": 0.05950317735413056, + "grad_norm": 2.690739863879584, + "learning_rate": 2.993175507227433e-06, + "loss": 0.566, + "step": 1442 + }, + { + "epoch": 0.059544441693488485, + "grad_norm": 2.7495109337498316, + "learning_rate": 2.993156392173913e-06, + "loss": 0.6365, + "step": 1443 + }, + { + "epoch": 0.059585706032846415, + "grad_norm": 3.135403843740253, + "learning_rate": 2.993137250448884e-06, + "loss": 0.5996, + "step": 1444 + }, + { + "epoch": 0.05962697037220434, + "grad_norm": 4.907887024587599, + "learning_rate": 2.9931180820526883e-06, + "loss": 0.5842, + "step": 1445 + }, + { + "epoch": 0.05966823471156227, + "grad_norm": 2.7525229323126603, + "learning_rate": 2.9930988869856687e-06, + "loss": 0.577, + "step": 1446 + }, + { + "epoch": 0.0597094990509202, + "grad_norm": 4.871165417834817, + "learning_rate": 2.993079665248168e-06, + "loss": 0.6097, + "step": 1447 + }, + { + "epoch": 0.05975076339027812, + "grad_norm": 3.0742313999206496, + "learning_rate": 2.9930604168405288e-06, + "loss": 0.5561, + "step": 1448 + }, + { + "epoch": 0.05979202772963605, + "grad_norm": 3.5209300481426364, + "learning_rate": 2.993041141763096e-06, + "loss": 0.5707, + "step": 1449 + }, + { + "epoch": 0.05983329206899397, + "grad_norm": 3.5902379301477048, + "learning_rate": 2.9930218400162126e-06, + "loss": 0.5823, + "step": 1450 + }, + { + "epoch": 0.0598745564083519, + "grad_norm": 5.072885387349025, + "learning_rate": 2.9930025116002248e-06, + "loss": 0.6551, + "step": 1451 + }, + { + "epoch": 0.059915820747709826, + "grad_norm": 4.6558780120582615, + "learning_rate": 2.992983156515477e-06, + "loss": 0.5554, + "step": 1452 + }, + { + "epoch": 0.059957085087067756, + "grad_norm": 3.138084883422881, + "learning_rate": 2.9929637747623153e-06, + "loss": 0.5848, + "step": 1453 + }, + { + "epoch": 0.059998349426425686, + "grad_norm": 4.364816503048904, + "learning_rate": 2.9929443663410857e-06, + "loss": 0.6134, + "step": 1454 + }, + { + "epoch": 0.06003961376578361, + "grad_norm": 8.7659518702716, + "learning_rate": 2.992924931252135e-06, + "loss": 0.5205, + "step": 1455 + }, + { + "epoch": 0.06008087810514154, + "grad_norm": 3.967444105382654, + "learning_rate": 2.9929054694958103e-06, + "loss": 0.6377, + "step": 1456 + }, + { + "epoch": 0.06012214244449946, + "grad_norm": 3.29457845254872, + "learning_rate": 2.9928859810724595e-06, + "loss": 0.5786, + "step": 1457 + }, + { + "epoch": 0.06016340678385739, + "grad_norm": 19.211345598673997, + "learning_rate": 2.99286646598243e-06, + "loss": 0.6055, + "step": 1458 + }, + { + "epoch": 0.060204671123215314, + "grad_norm": 2.5736291373517823, + "learning_rate": 2.992846924226071e-06, + "loss": 0.5431, + "step": 1459 + }, + { + "epoch": 0.060245935462573244, + "grad_norm": 3.294670929844419, + "learning_rate": 
2.9928273558037315e-06, + "loss": 0.5999, + "step": 1460 + }, + { + "epoch": 0.060287199801931174, + "grad_norm": 2.7612534638047204, + "learning_rate": 2.9928077607157608e-06, + "loss": 0.5684, + "step": 1461 + }, + { + "epoch": 0.0603284641412891, + "grad_norm": 3.2750384411220588, + "learning_rate": 2.992788138962509e-06, + "loss": 0.5803, + "step": 1462 + }, + { + "epoch": 0.06036972848064703, + "grad_norm": 4.713583294991901, + "learning_rate": 2.992768490544327e-06, + "loss": 0.5918, + "step": 1463 + }, + { + "epoch": 0.06041099282000495, + "grad_norm": 7.1370846145560565, + "learning_rate": 2.992748815461565e-06, + "loss": 0.5735, + "step": 1464 + }, + { + "epoch": 0.06045225715936288, + "grad_norm": 2.990591962091046, + "learning_rate": 2.992729113714575e-06, + "loss": 0.5875, + "step": 1465 + }, + { + "epoch": 0.0604935214987208, + "grad_norm": 5.070846480565682, + "learning_rate": 2.992709385303709e-06, + "loss": 0.6309, + "step": 1466 + }, + { + "epoch": 0.06053478583807873, + "grad_norm": 12.401589942885195, + "learning_rate": 2.992689630229319e-06, + "loss": 0.616, + "step": 1467 + }, + { + "epoch": 0.06057605017743666, + "grad_norm": 1.9811978087167872, + "learning_rate": 2.992669848491758e-06, + "loss": 0.6153, + "step": 1468 + }, + { + "epoch": 0.060617314516794585, + "grad_norm": 9.785294818304521, + "learning_rate": 2.9926500400913794e-06, + "loss": 0.5706, + "step": 1469 + }, + { + "epoch": 0.060658578856152515, + "grad_norm": 3.5024356957715046, + "learning_rate": 2.992630205028537e-06, + "loss": 0.6277, + "step": 1470 + }, + { + "epoch": 0.06069984319551044, + "grad_norm": 7.762577740657455, + "learning_rate": 2.992610343303586e-06, + "loss": 0.5573, + "step": 1471 + }, + { + "epoch": 0.06074110753486837, + "grad_norm": 1.898061094332069, + "learning_rate": 2.992590454916879e-06, + "loss": 0.5611, + "step": 1472 + }, + { + "epoch": 0.06078237187422629, + "grad_norm": 3.067871807042083, + "learning_rate": 2.9925705398687736e-06, + "loss": 0.6256, + "step": 1473 + }, + { + "epoch": 0.06082363621358422, + "grad_norm": 2.205283428901071, + "learning_rate": 2.992550598159624e-06, + "loss": 0.5753, + "step": 1474 + }, + { + "epoch": 0.06086490055294215, + "grad_norm": 2.538173501425813, + "learning_rate": 2.9925306297897873e-06, + "loss": 0.5953, + "step": 1475 + }, + { + "epoch": 0.06090616489230007, + "grad_norm": 2.360548177342395, + "learning_rate": 2.992510634759619e-06, + "loss": 0.5741, + "step": 1476 + }, + { + "epoch": 0.060947429231658, + "grad_norm": 3.7765827660032496, + "learning_rate": 2.992490613069478e-06, + "loss": 0.5699, + "step": 1477 + }, + { + "epoch": 0.060988693571015926, + "grad_norm": 3.7892845691227155, + "learning_rate": 2.9924705647197206e-06, + "loss": 0.6078, + "step": 1478 + }, + { + "epoch": 0.061029957910373855, + "grad_norm": 2.7891086338358733, + "learning_rate": 2.9924504897107055e-06, + "loss": 0.5358, + "step": 1479 + }, + { + "epoch": 0.061071222249731785, + "grad_norm": 3.0104716589421594, + "learning_rate": 2.992430388042791e-06, + "loss": 0.5919, + "step": 1480 + }, + { + "epoch": 0.06111248658908971, + "grad_norm": 5.9614564859079255, + "learning_rate": 2.9924102597163366e-06, + "loss": 0.5776, + "step": 1481 + }, + { + "epoch": 0.06115375092844764, + "grad_norm": 3.1666325615044717, + "learning_rate": 2.992390104731701e-06, + "loss": 0.6359, + "step": 1482 + }, + { + "epoch": 0.06119501526780556, + "grad_norm": 6.33377082310171, + "learning_rate": 2.992369923089245e-06, + "loss": 0.5822, + "step": 1483 + }, + { + "epoch": 
0.06123627960716349, + "grad_norm": 2.8943132563313196, + "learning_rate": 2.9923497147893287e-06, + "loss": 0.6153, + "step": 1484 + }, + { + "epoch": 0.061277543946521414, + "grad_norm": 5.434681303098746, + "learning_rate": 2.9923294798323137e-06, + "loss": 0.5562, + "step": 1485 + }, + { + "epoch": 0.06131880828587934, + "grad_norm": 3.0483829242546334, + "learning_rate": 2.9923092182185602e-06, + "loss": 0.6006, + "step": 1486 + }, + { + "epoch": 0.06136007262523727, + "grad_norm": 4.328979925071516, + "learning_rate": 2.9922889299484314e-06, + "loss": 0.592, + "step": 1487 + }, + { + "epoch": 0.061401336964595196, + "grad_norm": 2.775583974029312, + "learning_rate": 2.9922686150222887e-06, + "loss": 0.6437, + "step": 1488 + }, + { + "epoch": 0.061442601303953126, + "grad_norm": 2.7412850960007114, + "learning_rate": 2.992248273440496e-06, + "loss": 0.5885, + "step": 1489 + }, + { + "epoch": 0.06148386564331105, + "grad_norm": 3.593193549402368, + "learning_rate": 2.9922279052034156e-06, + "loss": 0.5992, + "step": 1490 + }, + { + "epoch": 0.06152512998266898, + "grad_norm": 9.26492875240213, + "learning_rate": 2.9922075103114123e-06, + "loss": 0.6133, + "step": 1491 + }, + { + "epoch": 0.0615663943220269, + "grad_norm": 7.111173594264585, + "learning_rate": 2.9921870887648497e-06, + "loss": 0.5824, + "step": 1492 + }, + { + "epoch": 0.06160765866138483, + "grad_norm": 3.1824891405225406, + "learning_rate": 2.9921666405640926e-06, + "loss": 0.5683, + "step": 1493 + }, + { + "epoch": 0.06164892300074276, + "grad_norm": 14.295828643218526, + "learning_rate": 2.9921461657095067e-06, + "loss": 0.5584, + "step": 1494 + }, + { + "epoch": 0.061690187340100684, + "grad_norm": 6.247480624085246, + "learning_rate": 2.9921256642014575e-06, + "loss": 0.5931, + "step": 1495 + }, + { + "epoch": 0.061731451679458614, + "grad_norm": 3.979583592374568, + "learning_rate": 2.9921051360403113e-06, + "loss": 0.6033, + "step": 1496 + }, + { + "epoch": 0.06177271601881654, + "grad_norm": 14.428630558811717, + "learning_rate": 2.9920845812264348e-06, + "loss": 0.6464, + "step": 1497 + }, + { + "epoch": 0.06181398035817447, + "grad_norm": 3.185396007870094, + "learning_rate": 2.9920639997601948e-06, + "loss": 0.5723, + "step": 1498 + }, + { + "epoch": 0.06185524469753239, + "grad_norm": 4.668538896077339, + "learning_rate": 2.9920433916419594e-06, + "loss": 0.5544, + "step": 1499 + }, + { + "epoch": 0.06189650903689032, + "grad_norm": 4.270694209011079, + "learning_rate": 2.9920227568720965e-06, + "loss": 0.548, + "step": 1500 + }, + { + "epoch": 0.06193777337624825, + "grad_norm": 2.5594056941725274, + "learning_rate": 2.9920020954509743e-06, + "loss": 0.5537, + "step": 1501 + }, + { + "epoch": 0.06197903771560617, + "grad_norm": 4.194547457345594, + "learning_rate": 2.9919814073789624e-06, + "loss": 0.6425, + "step": 1502 + }, + { + "epoch": 0.0620203020549641, + "grad_norm": 2.512219561304397, + "learning_rate": 2.9919606926564302e-06, + "loss": 0.5798, + "step": 1503 + }, + { + "epoch": 0.062061566394322025, + "grad_norm": 2.848708335185993, + "learning_rate": 2.9919399512837483e-06, + "loss": 0.5983, + "step": 1504 + }, + { + "epoch": 0.062102830733679955, + "grad_norm": 3.943793800936871, + "learning_rate": 2.991919183261286e-06, + "loss": 0.6212, + "step": 1505 + }, + { + "epoch": 0.06214409507303788, + "grad_norm": 3.0269581798584335, + "learning_rate": 2.991898388589415e-06, + "loss": 0.5713, + "step": 1506 + }, + { + "epoch": 0.06218535941239581, + "grad_norm": 3.4110629464677986, + "learning_rate": 
2.9918775672685066e-06, + "loss": 0.642, + "step": 1507 + }, + { + "epoch": 0.06222662375175374, + "grad_norm": 4.952045644405344, + "learning_rate": 2.9918567192989327e-06, + "loss": 0.6592, + "step": 1508 + }, + { + "epoch": 0.06226788809111166, + "grad_norm": 4.230671952476401, + "learning_rate": 2.991835844681066e-06, + "loss": 0.5991, + "step": 1509 + }, + { + "epoch": 0.06230915243046959, + "grad_norm": 2.620803441045859, + "learning_rate": 2.9918149434152786e-06, + "loss": 0.5909, + "step": 1510 + }, + { + "epoch": 0.06235041676982751, + "grad_norm": 4.205497748958049, + "learning_rate": 2.991794015501945e-06, + "loss": 0.6708, + "step": 1511 + }, + { + "epoch": 0.06239168110918544, + "grad_norm": 14.370693047520325, + "learning_rate": 2.9917730609414377e-06, + "loss": 0.5284, + "step": 1512 + }, + { + "epoch": 0.062432945448543366, + "grad_norm": 3.321424426258114, + "learning_rate": 2.991752079734132e-06, + "loss": 0.5217, + "step": 1513 + }, + { + "epoch": 0.062474209787901296, + "grad_norm": 5.378278395670176, + "learning_rate": 2.991731071880403e-06, + "loss": 0.5483, + "step": 1514 + }, + { + "epoch": 0.06251547412725922, + "grad_norm": 2.62608396207817, + "learning_rate": 2.9917100373806244e-06, + "loss": 0.5285, + "step": 1515 + }, + { + "epoch": 0.06255673846661715, + "grad_norm": 3.7359207161117576, + "learning_rate": 2.9916889762351735e-06, + "loss": 0.5786, + "step": 1516 + }, + { + "epoch": 0.06259800280597508, + "grad_norm": 3.0428382468793886, + "learning_rate": 2.9916678884444256e-06, + "loss": 0.6389, + "step": 1517 + }, + { + "epoch": 0.06263926714533301, + "grad_norm": 2.638456059743879, + "learning_rate": 2.9916467740087574e-06, + "loss": 0.5848, + "step": 1518 + }, + { + "epoch": 0.06268053148469092, + "grad_norm": 3.711003556358602, + "learning_rate": 2.9916256329285464e-06, + "loss": 0.5905, + "step": 1519 + }, + { + "epoch": 0.06272179582404885, + "grad_norm": 4.1133165838571575, + "learning_rate": 2.99160446520417e-06, + "loss": 0.5933, + "step": 1520 + }, + { + "epoch": 0.06276306016340678, + "grad_norm": 2.3765228623916235, + "learning_rate": 2.991583270836007e-06, + "loss": 0.5401, + "step": 1521 + }, + { + "epoch": 0.06280432450276471, + "grad_norm": 3.5084590824896167, + "learning_rate": 2.991562049824435e-06, + "loss": 0.5751, + "step": 1522 + }, + { + "epoch": 0.06284558884212264, + "grad_norm": 2.7510049216607686, + "learning_rate": 2.991540802169833e-06, + "loss": 0.5922, + "step": 1523 + }, + { + "epoch": 0.06288685318148056, + "grad_norm": 5.324881007382552, + "learning_rate": 2.991519527872582e-06, + "loss": 0.537, + "step": 1524 + }, + { + "epoch": 0.06292811752083849, + "grad_norm": 9.094179624262082, + "learning_rate": 2.9914982269330604e-06, + "loss": 0.633, + "step": 1525 + }, + { + "epoch": 0.06296938186019642, + "grad_norm": 3.927642162979298, + "learning_rate": 2.9914768993516494e-06, + "loss": 0.5355, + "step": 1526 + }, + { + "epoch": 0.06301064619955435, + "grad_norm": 9.917323116390152, + "learning_rate": 2.99145554512873e-06, + "loss": 0.566, + "step": 1527 + }, + { + "epoch": 0.06305191053891228, + "grad_norm": 3.2758802956113495, + "learning_rate": 2.9914341642646835e-06, + "loss": 0.5715, + "step": 1528 + }, + { + "epoch": 0.0630931748782702, + "grad_norm": 26.381602854738, + "learning_rate": 2.9914127567598914e-06, + "loss": 0.6164, + "step": 1529 + }, + { + "epoch": 0.06313443921762812, + "grad_norm": 6.629666226554291, + "learning_rate": 2.9913913226147375e-06, + "loss": 0.6233, + "step": 1530 + }, + { + "epoch": 
0.06317570355698605, + "grad_norm": 19.655352130035713, + "learning_rate": 2.991369861829603e-06, + "loss": 0.5857, + "step": 1531 + }, + { + "epoch": 0.06321696789634398, + "grad_norm": 2.8333469618208333, + "learning_rate": 2.9913483744048716e-06, + "loss": 0.6262, + "step": 1532 + }, + { + "epoch": 0.0632582322357019, + "grad_norm": 5.375181597795378, + "learning_rate": 2.991326860340928e-06, + "loss": 0.6104, + "step": 1533 + }, + { + "epoch": 0.06329949657505983, + "grad_norm": 3.696995394474378, + "learning_rate": 2.991305319638156e-06, + "loss": 0.6084, + "step": 1534 + }, + { + "epoch": 0.06334076091441776, + "grad_norm": 2.5292163120732885, + "learning_rate": 2.99128375229694e-06, + "loss": 0.6558, + "step": 1535 + }, + { + "epoch": 0.06338202525377569, + "grad_norm": 3.289032142779019, + "learning_rate": 2.9912621583176655e-06, + "loss": 0.5941, + "step": 1536 + }, + { + "epoch": 0.06342328959313362, + "grad_norm": 2.5871385130132776, + "learning_rate": 2.9912405377007184e-06, + "loss": 0.554, + "step": 1537 + }, + { + "epoch": 0.06346455393249154, + "grad_norm": 5.707477774329287, + "learning_rate": 2.9912188904464846e-06, + "loss": 0.572, + "step": 1538 + }, + { + "epoch": 0.06350581827184947, + "grad_norm": 2.4863903710946182, + "learning_rate": 2.9911972165553515e-06, + "loss": 0.5934, + "step": 1539 + }, + { + "epoch": 0.0635470826112074, + "grad_norm": 3.014063420388294, + "learning_rate": 2.9911755160277056e-06, + "loss": 0.5755, + "step": 1540 + }, + { + "epoch": 0.06358834695056532, + "grad_norm": 2.6742983264338034, + "learning_rate": 2.991153788863934e-06, + "loss": 0.5106, + "step": 1541 + }, + { + "epoch": 0.06362961128992325, + "grad_norm": 2.650577786707388, + "learning_rate": 2.991132035064426e-06, + "loss": 0.5991, + "step": 1542 + }, + { + "epoch": 0.06367087562928117, + "grad_norm": 3.114907235502346, + "learning_rate": 2.9911102546295697e-06, + "loss": 0.5623, + "step": 1543 + }, + { + "epoch": 0.0637121399686391, + "grad_norm": 3.56740765252228, + "learning_rate": 2.991088447559754e-06, + "loss": 0.5825, + "step": 1544 + }, + { + "epoch": 0.06375340430799703, + "grad_norm": 2.7701661711111676, + "learning_rate": 2.991066613855368e-06, + "loss": 0.5782, + "step": 1545 + }, + { + "epoch": 0.06379466864735496, + "grad_norm": 3.107702097915112, + "learning_rate": 2.991044753516803e-06, + "loss": 0.593, + "step": 1546 + }, + { + "epoch": 0.06383593298671288, + "grad_norm": 2.3396950282273647, + "learning_rate": 2.991022866544448e-06, + "loss": 0.5695, + "step": 1547 + }, + { + "epoch": 0.0638771973260708, + "grad_norm": 5.036864441322037, + "learning_rate": 2.991000952938695e-06, + "loss": 0.5453, + "step": 1548 + }, + { + "epoch": 0.06391846166542874, + "grad_norm": 2.2850572163169884, + "learning_rate": 2.9909790126999346e-06, + "loss": 0.6216, + "step": 1549 + }, + { + "epoch": 0.06395972600478667, + "grad_norm": 2.8543854568013263, + "learning_rate": 2.9909570458285598e-06, + "loss": 0.5856, + "step": 1550 + }, + { + "epoch": 0.0640009903441446, + "grad_norm": 3.2676991719387942, + "learning_rate": 2.990935052324962e-06, + "loss": 0.5893, + "step": 1551 + }, + { + "epoch": 0.06404225468350251, + "grad_norm": 2.889653821185164, + "learning_rate": 2.9909130321895345e-06, + "loss": 0.5771, + "step": 1552 + }, + { + "epoch": 0.06408351902286044, + "grad_norm": 4.122228055825177, + "learning_rate": 2.9908909854226705e-06, + "loss": 0.6161, + "step": 1553 + }, + { + "epoch": 0.06412478336221837, + "grad_norm": 4.628278845701415, + "learning_rate": 
2.990868912024764e-06, + "loss": 0.6046, + "step": 1554 + }, + { + "epoch": 0.0641660477015763, + "grad_norm": 2.8067295873443636, + "learning_rate": 2.9908468119962093e-06, + "loss": 0.5906, + "step": 1555 + }, + { + "epoch": 0.06420731204093423, + "grad_norm": 2.719665153076027, + "learning_rate": 2.9908246853374006e-06, + "loss": 0.6221, + "step": 1556 + }, + { + "epoch": 0.06424857638029215, + "grad_norm": 2.730718934950769, + "learning_rate": 2.9908025320487337e-06, + "loss": 0.6049, + "step": 1557 + }, + { + "epoch": 0.06428984071965008, + "grad_norm": 3.7278106668768705, + "learning_rate": 2.990780352130604e-06, + "loss": 0.5899, + "step": 1558 + }, + { + "epoch": 0.064331105059008, + "grad_norm": 7.960056558695377, + "learning_rate": 2.990758145583408e-06, + "loss": 0.5416, + "step": 1559 + }, + { + "epoch": 0.06437236939836594, + "grad_norm": 5.114167831391181, + "learning_rate": 2.990735912407542e-06, + "loss": 0.5996, + "step": 1560 + }, + { + "epoch": 0.06441363373772385, + "grad_norm": 2.1327176241710566, + "learning_rate": 2.990713652603404e-06, + "loss": 0.6092, + "step": 1561 + }, + { + "epoch": 0.06445489807708178, + "grad_norm": 4.585352334949984, + "learning_rate": 2.9906913661713906e-06, + "loss": 0.6241, + "step": 1562 + }, + { + "epoch": 0.06449616241643971, + "grad_norm": 3.543980847744048, + "learning_rate": 2.9906690531119e-06, + "loss": 0.6577, + "step": 1563 + }, + { + "epoch": 0.06453742675579764, + "grad_norm": 3.054094301292587, + "learning_rate": 2.9906467134253314e-06, + "loss": 0.5868, + "step": 1564 + }, + { + "epoch": 0.06457869109515557, + "grad_norm": 4.989128493594896, + "learning_rate": 2.9906243471120833e-06, + "loss": 0.598, + "step": 1565 + }, + { + "epoch": 0.06461995543451349, + "grad_norm": 4.745205360922518, + "learning_rate": 2.9906019541725558e-06, + "loss": 0.6137, + "step": 1566 + }, + { + "epoch": 0.06466121977387142, + "grad_norm": 2.4880836278547287, + "learning_rate": 2.9905795346071476e-06, + "loss": 0.6203, + "step": 1567 + }, + { + "epoch": 0.06470248411322935, + "grad_norm": 3.2842010731208555, + "learning_rate": 2.9905570884162608e-06, + "loss": 0.5562, + "step": 1568 + }, + { + "epoch": 0.06474374845258728, + "grad_norm": 2.5895821886500348, + "learning_rate": 2.990534615600295e-06, + "loss": 0.6251, + "step": 1569 + }, + { + "epoch": 0.0647850127919452, + "grad_norm": 2.8892812847954614, + "learning_rate": 2.990512116159653e-06, + "loss": 0.6138, + "step": 1570 + }, + { + "epoch": 0.06482627713130312, + "grad_norm": 27.603722642634885, + "learning_rate": 2.9904895900947353e-06, + "loss": 0.702, + "step": 1571 + }, + { + "epoch": 0.06486754147066105, + "grad_norm": 2.547596987730476, + "learning_rate": 2.990467037405945e-06, + "loss": 0.6183, + "step": 1572 + }, + { + "epoch": 0.06490880581001898, + "grad_norm": 4.6912226546540365, + "learning_rate": 2.990444458093685e-06, + "loss": 0.611, + "step": 1573 + }, + { + "epoch": 0.06495007014937691, + "grad_norm": 3.4741283391286855, + "learning_rate": 2.9904218521583577e-06, + "loss": 0.6559, + "step": 1574 + }, + { + "epoch": 0.06499133448873484, + "grad_norm": 3.1037050599917686, + "learning_rate": 2.990399219600369e-06, + "loss": 0.6012, + "step": 1575 + }, + { + "epoch": 0.06503259882809276, + "grad_norm": 4.201896759073306, + "learning_rate": 2.9903765604201206e-06, + "loss": 0.6608, + "step": 1576 + }, + { + "epoch": 0.06507386316745069, + "grad_norm": 2.3204311776028694, + "learning_rate": 2.9903538746180193e-06, + "loss": 0.6182, + "step": 1577 + }, + { + "epoch": 
0.06511512750680862, + "grad_norm": 2.6686201494131674, + "learning_rate": 2.990331162194469e-06, + "loss": 0.5765, + "step": 1578 + }, + { + "epoch": 0.06515639184616655, + "grad_norm": 3.069034417117591, + "learning_rate": 2.9903084231498763e-06, + "loss": 0.6103, + "step": 1579 + }, + { + "epoch": 0.06519765618552446, + "grad_norm": 1.9192779576120442, + "learning_rate": 2.990285657484647e-06, + "loss": 0.5749, + "step": 1580 + }, + { + "epoch": 0.0652389205248824, + "grad_norm": 5.806291533376763, + "learning_rate": 2.9902628651991873e-06, + "loss": 0.6576, + "step": 1581 + }, + { + "epoch": 0.06528018486424032, + "grad_norm": 2.81685690548349, + "learning_rate": 2.990240046293905e-06, + "loss": 0.5449, + "step": 1582 + }, + { + "epoch": 0.06532144920359825, + "grad_norm": 2.151828002238319, + "learning_rate": 2.9902172007692075e-06, + "loss": 0.5636, + "step": 1583 + }, + { + "epoch": 0.06536271354295618, + "grad_norm": 2.845985380640467, + "learning_rate": 2.9901943286255033e-06, + "loss": 0.588, + "step": 1584 + }, + { + "epoch": 0.0654039778823141, + "grad_norm": 2.968468220998985, + "learning_rate": 2.9901714298631996e-06, + "loss": 0.5728, + "step": 1585 + }, + { + "epoch": 0.06544524222167203, + "grad_norm": 3.137091003051622, + "learning_rate": 2.990148504482707e-06, + "loss": 0.5765, + "step": 1586 + }, + { + "epoch": 0.06548650656102996, + "grad_norm": 2.9099763422806637, + "learning_rate": 2.9901255524844337e-06, + "loss": 0.589, + "step": 1587 + }, + { + "epoch": 0.06552777090038789, + "grad_norm": 3.31985908304647, + "learning_rate": 2.990102573868791e-06, + "loss": 0.5915, + "step": 1588 + }, + { + "epoch": 0.06556903523974582, + "grad_norm": 3.0841904699487093, + "learning_rate": 2.990079568636188e-06, + "loss": 0.5939, + "step": 1589 + }, + { + "epoch": 0.06561029957910373, + "grad_norm": 6.090711884772302, + "learning_rate": 2.990056536787037e-06, + "loss": 0.6203, + "step": 1590 + }, + { + "epoch": 0.06565156391846166, + "grad_norm": 3.877658230726121, + "learning_rate": 2.990033478321748e-06, + "loss": 0.5776, + "step": 1591 + }, + { + "epoch": 0.0656928282578196, + "grad_norm": 3.1718219176918683, + "learning_rate": 2.990010393240734e-06, + "loss": 0.643, + "step": 1592 + }, + { + "epoch": 0.06573409259717752, + "grad_norm": 1.9722518759564405, + "learning_rate": 2.9899872815444072e-06, + "loss": 0.5676, + "step": 1593 + }, + { + "epoch": 0.06577535693653544, + "grad_norm": 6.150615732759632, + "learning_rate": 2.9899641432331795e-06, + "loss": 0.5306, + "step": 1594 + }, + { + "epoch": 0.06581662127589337, + "grad_norm": 5.5988234459356185, + "learning_rate": 2.989940978307465e-06, + "loss": 0.6286, + "step": 1595 + }, + { + "epoch": 0.0658578856152513, + "grad_norm": 8.105455424679953, + "learning_rate": 2.9899177867676777e-06, + "loss": 0.5723, + "step": 1596 + }, + { + "epoch": 0.06589914995460923, + "grad_norm": 13.49744212078056, + "learning_rate": 2.9898945686142316e-06, + "loss": 0.5963, + "step": 1597 + }, + { + "epoch": 0.06594041429396716, + "grad_norm": 2.8592021806027774, + "learning_rate": 2.9898713238475414e-06, + "loss": 0.6365, + "step": 1598 + }, + { + "epoch": 0.06598167863332507, + "grad_norm": 2.8660079056427805, + "learning_rate": 2.9898480524680217e-06, + "loss": 0.6226, + "step": 1599 + }, + { + "epoch": 0.066022942972683, + "grad_norm": 2.5833437182547163, + "learning_rate": 2.9898247544760892e-06, + "loss": 0.6363, + "step": 1600 + }, + { + "epoch": 0.06606420731204093, + "grad_norm": 2.3766439656070157, + "learning_rate": 
2.9898014298721595e-06, + "loss": 0.5626, + "step": 1601 + }, + { + "epoch": 0.06610547165139886, + "grad_norm": 4.323408456122891, + "learning_rate": 2.989778078656649e-06, + "loss": 0.5742, + "step": 1602 + }, + { + "epoch": 0.0661467359907568, + "grad_norm": 2.149048323007734, + "learning_rate": 2.989754700829976e-06, + "loss": 0.5496, + "step": 1603 + }, + { + "epoch": 0.06618800033011471, + "grad_norm": 2.74111863607722, + "learning_rate": 2.9897312963925565e-06, + "loss": 0.6017, + "step": 1604 + }, + { + "epoch": 0.06622926466947264, + "grad_norm": 2.5639056283951387, + "learning_rate": 2.98970786534481e-06, + "loss": 0.5886, + "step": 1605 + }, + { + "epoch": 0.06627052900883057, + "grad_norm": 2.6296602019101467, + "learning_rate": 2.9896844076871536e-06, + "loss": 0.5867, + "step": 1606 + }, + { + "epoch": 0.0663117933481885, + "grad_norm": 4.3208191405248755, + "learning_rate": 2.9896609234200074e-06, + "loss": 0.6083, + "step": 1607 + }, + { + "epoch": 0.06635305768754642, + "grad_norm": 2.047850228718438, + "learning_rate": 2.989637412543791e-06, + "loss": 0.5889, + "step": 1608 + }, + { + "epoch": 0.06639432202690435, + "grad_norm": 3.0104732926420916, + "learning_rate": 2.989613875058923e-06, + "loss": 0.5533, + "step": 1609 + }, + { + "epoch": 0.06643558636626228, + "grad_norm": 7.823281864495238, + "learning_rate": 2.9895903109658254e-06, + "loss": 0.6123, + "step": 1610 + }, + { + "epoch": 0.0664768507056202, + "grad_norm": 2.6951139813219456, + "learning_rate": 2.989566720264918e-06, + "loss": 0.5822, + "step": 1611 + }, + { + "epoch": 0.06651811504497813, + "grad_norm": 2.7278481889911563, + "learning_rate": 2.989543102956623e-06, + "loss": 0.5714, + "step": 1612 + }, + { + "epoch": 0.06655937938433605, + "grad_norm": 4.734288236913701, + "learning_rate": 2.9895194590413618e-06, + "loss": 0.5921, + "step": 1613 + }, + { + "epoch": 0.06660064372369398, + "grad_norm": 2.2767414299395616, + "learning_rate": 2.9894957885195568e-06, + "loss": 0.6271, + "step": 1614 + }, + { + "epoch": 0.06664190806305191, + "grad_norm": 3.4748133598168627, + "learning_rate": 2.9894720913916308e-06, + "loss": 0.5988, + "step": 1615 + }, + { + "epoch": 0.06668317240240984, + "grad_norm": 2.9103801901336457, + "learning_rate": 2.9894483676580073e-06, + "loss": 0.6266, + "step": 1616 + }, + { + "epoch": 0.06672443674176777, + "grad_norm": 7.614250656919041, + "learning_rate": 2.9894246173191103e-06, + "loss": 0.6084, + "step": 1617 + }, + { + "epoch": 0.06676570108112569, + "grad_norm": 1.859935091288786, + "learning_rate": 2.9894008403753633e-06, + "loss": 0.647, + "step": 1618 + }, + { + "epoch": 0.06680696542048362, + "grad_norm": 9.731726229457244, + "learning_rate": 2.9893770368271915e-06, + "loss": 0.5427, + "step": 1619 + }, + { + "epoch": 0.06684822975984155, + "grad_norm": 3.8670833254222923, + "learning_rate": 2.9893532066750196e-06, + "loss": 0.6037, + "step": 1620 + }, + { + "epoch": 0.06688949409919948, + "grad_norm": 2.0868806570970864, + "learning_rate": 2.989329349919274e-06, + "loss": 0.5986, + "step": 1621 + }, + { + "epoch": 0.0669307584385574, + "grad_norm": 4.328098972192275, + "learning_rate": 2.98930546656038e-06, + "loss": 0.5657, + "step": 1622 + }, + { + "epoch": 0.06697202277791532, + "grad_norm": 3.1384611017756425, + "learning_rate": 2.989281556598765e-06, + "loss": 0.5527, + "step": 1623 + }, + { + "epoch": 0.06701328711727325, + "grad_norm": 2.42143952305694, + "learning_rate": 2.989257620034856e-06, + "loss": 0.5658, + "step": 1624 + }, + { + "epoch": 
0.06705455145663118, + "grad_norm": 4.999418354565772, + "learning_rate": 2.98923365686908e-06, + "loss": 0.6687, + "step": 1625 + }, + { + "epoch": 0.06709581579598911, + "grad_norm": 2.377228039177497, + "learning_rate": 2.989209667101866e-06, + "loss": 0.5964, + "step": 1626 + }, + { + "epoch": 0.06713708013534703, + "grad_norm": 1.8619146851034623, + "learning_rate": 2.9891856507336414e-06, + "loss": 0.5879, + "step": 1627 + }, + { + "epoch": 0.06717834447470496, + "grad_norm": 2.66216542312295, + "learning_rate": 2.9891616077648355e-06, + "loss": 0.5463, + "step": 1628 + }, + { + "epoch": 0.06721960881406289, + "grad_norm": 2.3486091423326747, + "learning_rate": 2.989137538195879e-06, + "loss": 0.6337, + "step": 1629 + }, + { + "epoch": 0.06726087315342082, + "grad_norm": 54.0616752593997, + "learning_rate": 2.9891134420272e-06, + "loss": 0.6157, + "step": 1630 + }, + { + "epoch": 0.06730213749277875, + "grad_norm": 19.369273581648766, + "learning_rate": 2.9890893192592296e-06, + "loss": 0.5931, + "step": 1631 + }, + { + "epoch": 0.06734340183213666, + "grad_norm": 4.482662892895464, + "learning_rate": 2.9890651698923993e-06, + "loss": 0.5831, + "step": 1632 + }, + { + "epoch": 0.06738466617149459, + "grad_norm": 8.418564553586648, + "learning_rate": 2.9890409939271394e-06, + "loss": 0.5902, + "step": 1633 + }, + { + "epoch": 0.06742593051085252, + "grad_norm": 3.882204627921987, + "learning_rate": 2.9890167913638825e-06, + "loss": 0.5761, + "step": 1634 + }, + { + "epoch": 0.06746719485021045, + "grad_norm": 4.5117829930163085, + "learning_rate": 2.9889925622030613e-06, + "loss": 0.5498, + "step": 1635 + }, + { + "epoch": 0.06750845918956838, + "grad_norm": 2.1836030855960535, + "learning_rate": 2.9889683064451075e-06, + "loss": 0.5688, + "step": 1636 + }, + { + "epoch": 0.0675497235289263, + "grad_norm": 4.527481605229174, + "learning_rate": 2.988944024090455e-06, + "loss": 0.6645, + "step": 1637 + }, + { + "epoch": 0.06759098786828423, + "grad_norm": 3.059116578096117, + "learning_rate": 2.988919715139538e-06, + "loss": 0.6021, + "step": 1638 + }, + { + "epoch": 0.06763225220764216, + "grad_norm": 3.637239733695794, + "learning_rate": 2.98889537959279e-06, + "loss": 0.6667, + "step": 1639 + }, + { + "epoch": 0.06767351654700009, + "grad_norm": 15.381871156078278, + "learning_rate": 2.9888710174506453e-06, + "loss": 0.5684, + "step": 1640 + }, + { + "epoch": 0.067714780886358, + "grad_norm": 4.287073438576503, + "learning_rate": 2.98884662871354e-06, + "loss": 0.6328, + "step": 1641 + }, + { + "epoch": 0.06775604522571593, + "grad_norm": 17.399938003091897, + "learning_rate": 2.988822213381909e-06, + "loss": 0.5778, + "step": 1642 + }, + { + "epoch": 0.06779730956507386, + "grad_norm": 3.165385681558134, + "learning_rate": 2.9887977714561896e-06, + "loss": 0.651, + "step": 1643 + }, + { + "epoch": 0.06783857390443179, + "grad_norm": 3.0417316996093042, + "learning_rate": 2.988773302936817e-06, + "loss": 0.5372, + "step": 1644 + }, + { + "epoch": 0.06787983824378972, + "grad_norm": 3.0677980996689205, + "learning_rate": 2.9887488078242287e-06, + "loss": 0.5793, + "step": 1645 + }, + { + "epoch": 0.06792110258314764, + "grad_norm": 4.192831659992608, + "learning_rate": 2.9887242861188634e-06, + "loss": 0.5581, + "step": 1646 + }, + { + "epoch": 0.06796236692250557, + "grad_norm": 2.622438969855731, + "learning_rate": 2.9886997378211574e-06, + "loss": 0.5857, + "step": 1647 + }, + { + "epoch": 0.0680036312618635, + "grad_norm": 2.7753296667536573, + "learning_rate": 
2.98867516293155e-06, + "loss": 0.6458, + "step": 1648 + }, + { + "epoch": 0.06804489560122143, + "grad_norm": 3.5919326832760685, + "learning_rate": 2.9886505614504796e-06, + "loss": 0.6367, + "step": 1649 + }, + { + "epoch": 0.06808615994057936, + "grad_norm": 2.4780858051565042, + "learning_rate": 2.988625933378387e-06, + "loss": 0.5593, + "step": 1650 + }, + { + "epoch": 0.06812742427993727, + "grad_norm": 3.428804416026667, + "learning_rate": 2.988601278715711e-06, + "loss": 0.6251, + "step": 1651 + }, + { + "epoch": 0.0681686886192952, + "grad_norm": 3.7245582523523204, + "learning_rate": 2.9885765974628923e-06, + "loss": 0.5593, + "step": 1652 + }, + { + "epoch": 0.06820995295865313, + "grad_norm": 2.707703167372083, + "learning_rate": 2.988551889620371e-06, + "loss": 0.5383, + "step": 1653 + }, + { + "epoch": 0.06825121729801106, + "grad_norm": 2.636167585595324, + "learning_rate": 2.98852715518859e-06, + "loss": 0.5832, + "step": 1654 + }, + { + "epoch": 0.06829248163736898, + "grad_norm": 5.557618891811008, + "learning_rate": 2.98850239416799e-06, + "loss": 0.5653, + "step": 1655 + }, + { + "epoch": 0.06833374597672691, + "grad_norm": 4.00196397349612, + "learning_rate": 2.988477606559014e-06, + "loss": 0.5716, + "step": 1656 + }, + { + "epoch": 0.06837501031608484, + "grad_norm": 5.42221911744447, + "learning_rate": 2.9884527923621037e-06, + "loss": 0.5616, + "step": 1657 + }, + { + "epoch": 0.06841627465544277, + "grad_norm": 9.869288570761126, + "learning_rate": 2.988427951577703e-06, + "loss": 0.5868, + "step": 1658 + }, + { + "epoch": 0.0684575389948007, + "grad_norm": 3.114833791615633, + "learning_rate": 2.988403084206256e-06, + "loss": 0.5877, + "step": 1659 + }, + { + "epoch": 0.06849880333415861, + "grad_norm": 12.856382360279689, + "learning_rate": 2.9883781902482064e-06, + "loss": 0.5971, + "step": 1660 + }, + { + "epoch": 0.06854006767351654, + "grad_norm": 2.905627639061486, + "learning_rate": 2.9883532697039994e-06, + "loss": 0.6207, + "step": 1661 + }, + { + "epoch": 0.06858133201287447, + "grad_norm": 2.4397408045261657, + "learning_rate": 2.9883283225740786e-06, + "loss": 0.5918, + "step": 1662 + }, + { + "epoch": 0.0686225963522324, + "grad_norm": 2.881820913415993, + "learning_rate": 2.9883033488588915e-06, + "loss": 0.6024, + "step": 1663 + }, + { + "epoch": 0.06866386069159033, + "grad_norm": 2.923207946964451, + "learning_rate": 2.9882783485588834e-06, + "loss": 0.6347, + "step": 1664 + }, + { + "epoch": 0.06870512503094825, + "grad_norm": 2.3290187921079255, + "learning_rate": 2.988253321674501e-06, + "loss": 0.5433, + "step": 1665 + }, + { + "epoch": 0.06874638937030618, + "grad_norm": 3.057533986710227, + "learning_rate": 2.9882282682061908e-06, + "loss": 0.6111, + "step": 1666 + }, + { + "epoch": 0.06878765370966411, + "grad_norm": 7.235055691166987, + "learning_rate": 2.988203188154401e-06, + "loss": 0.5928, + "step": 1667 + }, + { + "epoch": 0.06882891804902204, + "grad_norm": 2.2945304772945323, + "learning_rate": 2.9881780815195794e-06, + "loss": 0.5649, + "step": 1668 + }, + { + "epoch": 0.06887018238837996, + "grad_norm": 6.580109572679873, + "learning_rate": 2.988152948302174e-06, + "loss": 0.5381, + "step": 1669 + }, + { + "epoch": 0.06891144672773789, + "grad_norm": 2.6789615271935894, + "learning_rate": 2.988127788502635e-06, + "loss": 0.5516, + "step": 1670 + }, + { + "epoch": 0.06895271106709581, + "grad_norm": 4.425483659847876, + "learning_rate": 2.9881026021214105e-06, + "loss": 0.6162, + "step": 1671 + }, + { + "epoch": 
0.06899397540645374, + "grad_norm": 2.4676738132702285, + "learning_rate": 2.9880773891589507e-06, + "loss": 0.6071, + "step": 1672 + }, + { + "epoch": 0.06903523974581167, + "grad_norm": 2.730327043569861, + "learning_rate": 2.9880521496157063e-06, + "loss": 0.577, + "step": 1673 + }, + { + "epoch": 0.06907650408516959, + "grad_norm": 3.6679254549656886, + "learning_rate": 2.9880268834921276e-06, + "loss": 0.6145, + "step": 1674 + }, + { + "epoch": 0.06911776842452752, + "grad_norm": 15.720374564953437, + "learning_rate": 2.988001590788667e-06, + "loss": 0.5555, + "step": 1675 + }, + { + "epoch": 0.06915903276388545, + "grad_norm": 6.779907338750145, + "learning_rate": 2.987976271505775e-06, + "loss": 0.607, + "step": 1676 + }, + { + "epoch": 0.06920029710324338, + "grad_norm": 2.5894171208772634, + "learning_rate": 2.9879509256439045e-06, + "loss": 0.6056, + "step": 1677 + }, + { + "epoch": 0.06924156144260131, + "grad_norm": 2.04027543638186, + "learning_rate": 2.9879255532035086e-06, + "loss": 0.5248, + "step": 1678 + }, + { + "epoch": 0.06928282578195923, + "grad_norm": 2.527215119062727, + "learning_rate": 2.9879001541850396e-06, + "loss": 0.5859, + "step": 1679 + }, + { + "epoch": 0.06932409012131716, + "grad_norm": 2.668091874885513, + "learning_rate": 2.9878747285889518e-06, + "loss": 0.5765, + "step": 1680 + }, + { + "epoch": 0.06936535446067509, + "grad_norm": 6.793976681193422, + "learning_rate": 2.9878492764156997e-06, + "loss": 0.6391, + "step": 1681 + }, + { + "epoch": 0.06940661880003302, + "grad_norm": 2.9116372587171133, + "learning_rate": 2.987823797665737e-06, + "loss": 0.5721, + "step": 1682 + }, + { + "epoch": 0.06944788313939095, + "grad_norm": 2.4678875671674483, + "learning_rate": 2.9877982923395193e-06, + "loss": 0.5819, + "step": 1683 + }, + { + "epoch": 0.06948914747874886, + "grad_norm": 15.161665080477244, + "learning_rate": 2.9877727604375026e-06, + "loss": 0.574, + "step": 1684 + }, + { + "epoch": 0.06953041181810679, + "grad_norm": 2.301689412687105, + "learning_rate": 2.987747201960142e-06, + "loss": 0.5449, + "step": 1685 + }, + { + "epoch": 0.06957167615746472, + "grad_norm": 2.4749966539826938, + "learning_rate": 2.987721616907895e-06, + "loss": 0.5907, + "step": 1686 + }, + { + "epoch": 0.06961294049682265, + "grad_norm": 5.046332325937077, + "learning_rate": 2.9876960052812183e-06, + "loss": 0.5925, + "step": 1687 + }, + { + "epoch": 0.06965420483618057, + "grad_norm": 4.153003780329645, + "learning_rate": 2.9876703670805693e-06, + "loss": 0.5296, + "step": 1688 + }, + { + "epoch": 0.0696954691755385, + "grad_norm": 2.357452976258467, + "learning_rate": 2.9876447023064054e-06, + "loss": 0.5501, + "step": 1689 + }, + { + "epoch": 0.06973673351489643, + "grad_norm": 7.680451669797055, + "learning_rate": 2.987619010959186e-06, + "loss": 0.5603, + "step": 1690 + }, + { + "epoch": 0.06977799785425436, + "grad_norm": 2.5057678956750586, + "learning_rate": 2.9875932930393698e-06, + "loss": 0.5824, + "step": 1691 + }, + { + "epoch": 0.06981926219361229, + "grad_norm": 3.4498561116366933, + "learning_rate": 2.987567548547416e-06, + "loss": 0.603, + "step": 1692 + }, + { + "epoch": 0.0698605265329702, + "grad_norm": 3.243853480945609, + "learning_rate": 2.987541777483784e-06, + "loss": 0.5731, + "step": 1693 + }, + { + "epoch": 0.06990179087232813, + "grad_norm": 2.978281962385941, + "learning_rate": 2.9875159798489348e-06, + "loss": 0.5737, + "step": 1694 + }, + { + "epoch": 0.06994305521168606, + "grad_norm": 3.2522083050289687, + "learning_rate": 
2.9874901556433297e-06, + "loss": 0.5761, + "step": 1695 + }, + { + "epoch": 0.06998431955104399, + "grad_norm": 2.8465699118966463, + "learning_rate": 2.9874643048674285e-06, + "loss": 0.6008, + "step": 1696 + }, + { + "epoch": 0.07002558389040192, + "grad_norm": 3.550499601507919, + "learning_rate": 2.9874384275216937e-06, + "loss": 0.5878, + "step": 1697 + }, + { + "epoch": 0.07006684822975984, + "grad_norm": 3.0149316776251016, + "learning_rate": 2.987412523606588e-06, + "loss": 0.5113, + "step": 1698 + }, + { + "epoch": 0.07010811256911777, + "grad_norm": 11.453929727566319, + "learning_rate": 2.9873865931225732e-06, + "loss": 0.668, + "step": 1699 + }, + { + "epoch": 0.0701493769084757, + "grad_norm": 2.58059464807709, + "learning_rate": 2.9873606360701135e-06, + "loss": 0.5743, + "step": 1700 + }, + { + "epoch": 0.07019064124783363, + "grad_norm": 3.112084519411079, + "learning_rate": 2.9873346524496717e-06, + "loss": 0.6893, + "step": 1701 + }, + { + "epoch": 0.07023190558719154, + "grad_norm": 2.4404331887223294, + "learning_rate": 2.9873086422617124e-06, + "loss": 0.583, + "step": 1702 + }, + { + "epoch": 0.07027316992654947, + "grad_norm": 2.0594215240603826, + "learning_rate": 2.9872826055067003e-06, + "loss": 0.5891, + "step": 1703 + }, + { + "epoch": 0.0703144342659074, + "grad_norm": 2.271529854611671, + "learning_rate": 2.9872565421850994e-06, + "loss": 0.6147, + "step": 1704 + }, + { + "epoch": 0.07035569860526533, + "grad_norm": 2.7482514308685926, + "learning_rate": 2.987230452297377e-06, + "loss": 0.639, + "step": 1705 + }, + { + "epoch": 0.07039696294462326, + "grad_norm": 5.259144438278797, + "learning_rate": 2.987204335843998e-06, + "loss": 0.6686, + "step": 1706 + }, + { + "epoch": 0.07043822728398118, + "grad_norm": 6.464442712332485, + "learning_rate": 2.987178192825429e-06, + "loss": 0.6132, + "step": 1707 + }, + { + "epoch": 0.07047949162333911, + "grad_norm": 3.354296471860785, + "learning_rate": 2.987152023242137e-06, + "loss": 0.5846, + "step": 1708 + }, + { + "epoch": 0.07052075596269704, + "grad_norm": 9.18844342617504, + "learning_rate": 2.98712582709459e-06, + "loss": 0.5878, + "step": 1709 + }, + { + "epoch": 0.07056202030205497, + "grad_norm": 3.224496943173057, + "learning_rate": 2.9870996043832553e-06, + "loss": 0.5364, + "step": 1710 + }, + { + "epoch": 0.0706032846414129, + "grad_norm": 4.170176636059401, + "learning_rate": 2.9870733551086017e-06, + "loss": 0.5766, + "step": 1711 + }, + { + "epoch": 0.07064454898077081, + "grad_norm": 3.9253413975387663, + "learning_rate": 2.9870470792710977e-06, + "loss": 0.5528, + "step": 1712 + }, + { + "epoch": 0.07068581332012874, + "grad_norm": 3.887034181801868, + "learning_rate": 2.9870207768712127e-06, + "loss": 0.5471, + "step": 1713 + }, + { + "epoch": 0.07072707765948667, + "grad_norm": 3.316978314144961, + "learning_rate": 2.9869944479094167e-06, + "loss": 0.6001, + "step": 1714 + }, + { + "epoch": 0.0707683419988446, + "grad_norm": 3.9312900207446013, + "learning_rate": 2.9869680923861804e-06, + "loss": 0.5925, + "step": 1715 + }, + { + "epoch": 0.07080960633820252, + "grad_norm": 4.619542308498755, + "learning_rate": 2.9869417103019736e-06, + "loss": 0.6608, + "step": 1716 + }, + { + "epoch": 0.07085087067756045, + "grad_norm": 4.154738122976594, + "learning_rate": 2.9869153016572684e-06, + "loss": 0.5287, + "step": 1717 + }, + { + "epoch": 0.07089213501691838, + "grad_norm": 4.3767913179249245, + "learning_rate": 2.986888866452536e-06, + "loss": 0.5883, + "step": 1718 + }, + { + "epoch": 
0.07093339935627631, + "grad_norm": 4.178291471976776, + "learning_rate": 2.986862404688249e-06, + "loss": 0.5382, + "step": 1719 + }, + { + "epoch": 0.07097466369563424, + "grad_norm": 15.32205941437949, + "learning_rate": 2.9868359163648795e-06, + "loss": 0.5772, + "step": 1720 + }, + { + "epoch": 0.07101592803499215, + "grad_norm": 3.6382461991726456, + "learning_rate": 2.9868094014829016e-06, + "loss": 0.6018, + "step": 1721 + }, + { + "epoch": 0.07105719237435008, + "grad_norm": 2.3158883267801103, + "learning_rate": 2.9867828600427887e-06, + "loss": 0.4899, + "step": 1722 + }, + { + "epoch": 0.07109845671370801, + "grad_norm": 3.878670153484651, + "learning_rate": 2.9867562920450136e-06, + "loss": 0.5854, + "step": 1723 + }, + { + "epoch": 0.07113972105306594, + "grad_norm": 2.869753514650413, + "learning_rate": 2.9867296974900524e-06, + "loss": 0.5857, + "step": 1724 + }, + { + "epoch": 0.07118098539242387, + "grad_norm": 3.544473035672693, + "learning_rate": 2.9867030763783796e-06, + "loss": 0.6298, + "step": 1725 + }, + { + "epoch": 0.07122224973178179, + "grad_norm": 3.2485890660060663, + "learning_rate": 2.9866764287104702e-06, + "loss": 0.6211, + "step": 1726 + }, + { + "epoch": 0.07126351407113972, + "grad_norm": 9.358041854423778, + "learning_rate": 2.986649754486801e-06, + "loss": 0.5683, + "step": 1727 + }, + { + "epoch": 0.07130477841049765, + "grad_norm": 4.435198304045458, + "learning_rate": 2.986623053707848e-06, + "loss": 0.5124, + "step": 1728 + }, + { + "epoch": 0.07134604274985558, + "grad_norm": 2.833578616161381, + "learning_rate": 2.9865963263740886e-06, + "loss": 0.6254, + "step": 1729 + }, + { + "epoch": 0.0713873070892135, + "grad_norm": 2.7113054203409477, + "learning_rate": 2.9865695724859996e-06, + "loss": 0.5718, + "step": 1730 + }, + { + "epoch": 0.07142857142857142, + "grad_norm": 7.027075942847941, + "learning_rate": 2.9865427920440596e-06, + "loss": 0.6318, + "step": 1731 + }, + { + "epoch": 0.07146983576792935, + "grad_norm": 3.6537755735592774, + "learning_rate": 2.9865159850487467e-06, + "loss": 0.5287, + "step": 1732 + }, + { + "epoch": 0.07151110010728728, + "grad_norm": 4.74023982615289, + "learning_rate": 2.986489151500539e-06, + "loss": 0.6334, + "step": 1733 + }, + { + "epoch": 0.07155236444664521, + "grad_norm": 3.346169160445864, + "learning_rate": 2.9864622913999167e-06, + "loss": 0.5835, + "step": 1734 + }, + { + "epoch": 0.07159362878600313, + "grad_norm": 4.148706224850554, + "learning_rate": 2.9864354047473596e-06, + "loss": 0.5646, + "step": 1735 + }, + { + "epoch": 0.07163489312536106, + "grad_norm": 3.9237140878299215, + "learning_rate": 2.9864084915433474e-06, + "loss": 0.5445, + "step": 1736 + }, + { + "epoch": 0.07167615746471899, + "grad_norm": 2.8728352941437105, + "learning_rate": 2.986381551788361e-06, + "loss": 0.5745, + "step": 1737 + }, + { + "epoch": 0.07171742180407692, + "grad_norm": 3.110778600955895, + "learning_rate": 2.986354585482882e-06, + "loss": 0.5666, + "step": 1738 + }, + { + "epoch": 0.07175868614343485, + "grad_norm": 2.7743052448465755, + "learning_rate": 2.9863275926273916e-06, + "loss": 0.5721, + "step": 1739 + }, + { + "epoch": 0.07179995048279277, + "grad_norm": 2.884958810217865, + "learning_rate": 2.986300573222372e-06, + "loss": 0.5778, + "step": 1740 + }, + { + "epoch": 0.0718412148221507, + "grad_norm": 2.848213234522332, + "learning_rate": 2.986273527268306e-06, + "loss": 0.5962, + "step": 1741 + }, + { + "epoch": 0.07188247916150863, + "grad_norm": 2.477237399336512, + "learning_rate": 
2.986246454765677e-06, + "loss": 0.5924, + "step": 1742 + }, + { + "epoch": 0.07192374350086655, + "grad_norm": 3.2258735547160406, + "learning_rate": 2.9862193557149686e-06, + "loss": 0.6096, + "step": 1743 + }, + { + "epoch": 0.07196500784022448, + "grad_norm": 2.7288420964136777, + "learning_rate": 2.9861922301166643e-06, + "loss": 0.6864, + "step": 1744 + }, + { + "epoch": 0.0720062721795824, + "grad_norm": 2.1781823138555207, + "learning_rate": 2.9861650779712488e-06, + "loss": 0.5684, + "step": 1745 + }, + { + "epoch": 0.07204753651894033, + "grad_norm": 4.477523616595059, + "learning_rate": 2.986137899279207e-06, + "loss": 0.5933, + "step": 1746 + }, + { + "epoch": 0.07208880085829826, + "grad_norm": 2.854672057201696, + "learning_rate": 2.986110694041025e-06, + "loss": 0.5765, + "step": 1747 + }, + { + "epoch": 0.07213006519765619, + "grad_norm": 3.4935424148586818, + "learning_rate": 2.9860834622571876e-06, + "loss": 0.5883, + "step": 1748 + }, + { + "epoch": 0.0721713295370141, + "grad_norm": 6.031407465480162, + "learning_rate": 2.9860562039281827e-06, + "loss": 0.655, + "step": 1749 + }, + { + "epoch": 0.07221259387637204, + "grad_norm": 4.079210654269751, + "learning_rate": 2.9860289190544963e-06, + "loss": 0.5748, + "step": 1750 + }, + { + "epoch": 0.07225385821572997, + "grad_norm": 5.557453603571104, + "learning_rate": 2.986001607636616e-06, + "loss": 0.599, + "step": 1751 + }, + { + "epoch": 0.0722951225550879, + "grad_norm": 2.393597810645245, + "learning_rate": 2.985974269675029e-06, + "loss": 0.6045, + "step": 1752 + }, + { + "epoch": 0.07233638689444583, + "grad_norm": 2.399549001966723, + "learning_rate": 2.9859469051702247e-06, + "loss": 0.6055, + "step": 1753 + }, + { + "epoch": 0.07237765123380374, + "grad_norm": 3.4227708032160518, + "learning_rate": 2.9859195141226914e-06, + "loss": 0.5983, + "step": 1754 + }, + { + "epoch": 0.07241891557316167, + "grad_norm": 3.3110278472942203, + "learning_rate": 2.985892096532918e-06, + "loss": 0.5495, + "step": 1755 + }, + { + "epoch": 0.0724601799125196, + "grad_norm": 2.160081445987161, + "learning_rate": 2.985864652401395e-06, + "loss": 0.5664, + "step": 1756 + }, + { + "epoch": 0.07250144425187753, + "grad_norm": 3.819744283427078, + "learning_rate": 2.985837181728612e-06, + "loss": 0.5748, + "step": 1757 + }, + { + "epoch": 0.07254270859123546, + "grad_norm": 3.0358970645546894, + "learning_rate": 2.9858096845150602e-06, + "loss": 0.5911, + "step": 1758 + }, + { + "epoch": 0.07258397293059338, + "grad_norm": 2.9729294976607568, + "learning_rate": 2.985782160761231e-06, + "loss": 0.6234, + "step": 1759 + }, + { + "epoch": 0.0726252372699513, + "grad_norm": 6.8283695167281735, + "learning_rate": 2.9857546104676144e-06, + "loss": 0.6526, + "step": 1760 + }, + { + "epoch": 0.07266650160930924, + "grad_norm": 2.188626814774935, + "learning_rate": 2.9857270336347044e-06, + "loss": 0.5631, + "step": 1761 + }, + { + "epoch": 0.07270776594866717, + "grad_norm": 2.1554248041396935, + "learning_rate": 2.9856994302629926e-06, + "loss": 0.5249, + "step": 1762 + }, + { + "epoch": 0.07274903028802508, + "grad_norm": 6.151431254011261, + "learning_rate": 2.9856718003529724e-06, + "loss": 0.6188, + "step": 1763 + }, + { + "epoch": 0.07279029462738301, + "grad_norm": 2.002242608475614, + "learning_rate": 2.9856441439051373e-06, + "loss": 0.5495, + "step": 1764 + }, + { + "epoch": 0.07283155896674094, + "grad_norm": 3.7961377582028697, + "learning_rate": 2.9856164609199814e-06, + "loss": 0.5922, + "step": 1765 + }, + { + "epoch": 
0.07287282330609887, + "grad_norm": 2.056444480026974, + "learning_rate": 2.985588751397999e-06, + "loss": 0.5646, + "step": 1766 + }, + { + "epoch": 0.0729140876454568, + "grad_norm": 2.507773935787821, + "learning_rate": 2.985561015339685e-06, + "loss": 0.6146, + "step": 1767 + }, + { + "epoch": 0.07295535198481472, + "grad_norm": 8.574125355584854, + "learning_rate": 2.9855332527455346e-06, + "loss": 0.5479, + "step": 1768 + }, + { + "epoch": 0.07299661632417265, + "grad_norm": 3.4141091774850487, + "learning_rate": 2.9855054636160446e-06, + "loss": 0.6265, + "step": 1769 + }, + { + "epoch": 0.07303788066353058, + "grad_norm": 2.6486134603456364, + "learning_rate": 2.985477647951711e-06, + "loss": 0.5836, + "step": 1770 + }, + { + "epoch": 0.07307914500288851, + "grad_norm": 2.2871120155564015, + "learning_rate": 2.98544980575303e-06, + "loss": 0.5575, + "step": 1771 + }, + { + "epoch": 0.07312040934224644, + "grad_norm": 4.8607351370098355, + "learning_rate": 2.9854219370205e-06, + "loss": 0.6239, + "step": 1772 + }, + { + "epoch": 0.07316167368160435, + "grad_norm": 3.9202986021228416, + "learning_rate": 2.985394041754618e-06, + "loss": 0.6312, + "step": 1773 + }, + { + "epoch": 0.07320293802096228, + "grad_norm": 2.4779738770627184, + "learning_rate": 2.985366119955882e-06, + "loss": 0.591, + "step": 1774 + }, + { + "epoch": 0.07324420236032021, + "grad_norm": 4.977900199596729, + "learning_rate": 2.985338171624792e-06, + "loss": 0.6084, + "step": 1775 + }, + { + "epoch": 0.07328546669967814, + "grad_norm": 2.588472258205464, + "learning_rate": 2.985310196761846e-06, + "loss": 0.593, + "step": 1776 + }, + { + "epoch": 0.07332673103903606, + "grad_norm": 2.5181389457683685, + "learning_rate": 2.9852821953675445e-06, + "loss": 0.6448, + "step": 1777 + }, + { + "epoch": 0.07336799537839399, + "grad_norm": 2.5612890151610865, + "learning_rate": 2.9852541674423874e-06, + "loss": 0.539, + "step": 1778 + }, + { + "epoch": 0.07340925971775192, + "grad_norm": 3.4815662209434275, + "learning_rate": 2.985226112986875e-06, + "loss": 0.528, + "step": 1779 + }, + { + "epoch": 0.07345052405710985, + "grad_norm": 2.8230057212342583, + "learning_rate": 2.9851980320015097e-06, + "loss": 0.5508, + "step": 1780 + }, + { + "epoch": 0.07349178839646778, + "grad_norm": 5.458208403311748, + "learning_rate": 2.985169924486791e-06, + "loss": 0.5977, + "step": 1781 + }, + { + "epoch": 0.0735330527358257, + "grad_norm": 5.768470368558419, + "learning_rate": 2.985141790443223e-06, + "loss": 0.526, + "step": 1782 + }, + { + "epoch": 0.07357431707518362, + "grad_norm": 3.0553274036906752, + "learning_rate": 2.9851136298713066e-06, + "loss": 0.6159, + "step": 1783 + }, + { + "epoch": 0.07361558141454155, + "grad_norm": 3.386920385204912, + "learning_rate": 2.985085442771546e-06, + "loss": 0.5686, + "step": 1784 + }, + { + "epoch": 0.07365684575389948, + "grad_norm": 10.080100749223242, + "learning_rate": 2.9850572291444443e-06, + "loss": 0.5757, + "step": 1785 + }, + { + "epoch": 0.07369811009325741, + "grad_norm": 4.065003393801341, + "learning_rate": 2.9850289889905054e-06, + "loss": 0.6401, + "step": 1786 + }, + { + "epoch": 0.07373937443261533, + "grad_norm": 3.2794886086551185, + "learning_rate": 2.9850007223102335e-06, + "loss": 0.5406, + "step": 1787 + }, + { + "epoch": 0.07378063877197326, + "grad_norm": 2.4316682121301887, + "learning_rate": 2.9849724291041343e-06, + "loss": 0.6115, + "step": 1788 + }, + { + "epoch": 0.07382190311133119, + "grad_norm": 9.463301519976232, + "learning_rate": 
2.9849441093727125e-06, + "loss": 0.6216, + "step": 1789 + }, + { + "epoch": 0.07386316745068912, + "grad_norm": 2.443287230935687, + "learning_rate": 2.984915763116474e-06, + "loss": 0.5551, + "step": 1790 + }, + { + "epoch": 0.07390443179004705, + "grad_norm": 2.649984000556022, + "learning_rate": 2.9848873903359254e-06, + "loss": 0.5852, + "step": 1791 + }, + { + "epoch": 0.07394569612940496, + "grad_norm": 2.877798242338757, + "learning_rate": 2.984858991031573e-06, + "loss": 0.6025, + "step": 1792 + }, + { + "epoch": 0.0739869604687629, + "grad_norm": 3.3745909719018186, + "learning_rate": 2.984830565203925e-06, + "loss": 0.6078, + "step": 1793 + }, + { + "epoch": 0.07402822480812082, + "grad_norm": 2.7288058611821553, + "learning_rate": 2.9848021128534884e-06, + "loss": 0.5894, + "step": 1794 + }, + { + "epoch": 0.07406948914747875, + "grad_norm": 5.531486028970645, + "learning_rate": 2.984773633980772e-06, + "loss": 0.6116, + "step": 1795 + }, + { + "epoch": 0.07411075348683667, + "grad_norm": 4.176316631794981, + "learning_rate": 2.984745128586284e-06, + "loss": 0.5705, + "step": 1796 + }, + { + "epoch": 0.0741520178261946, + "grad_norm": 3.24268634989684, + "learning_rate": 2.9847165966705338e-06, + "loss": 0.5972, + "step": 1797 + }, + { + "epoch": 0.07419328216555253, + "grad_norm": 4.108676245980379, + "learning_rate": 2.9846880382340305e-06, + "loss": 0.6254, + "step": 1798 + }, + { + "epoch": 0.07423454650491046, + "grad_norm": 3.3629355730275012, + "learning_rate": 2.984659453277285e-06, + "loss": 0.5853, + "step": 1799 + }, + { + "epoch": 0.07427581084426839, + "grad_norm": 2.21120431067138, + "learning_rate": 2.9846308418008075e-06, + "loss": 0.5724, + "step": 1800 + }, + { + "epoch": 0.0743170751836263, + "grad_norm": 6.945586368151555, + "learning_rate": 2.9846022038051097e-06, + "loss": 0.6001, + "step": 1801 + }, + { + "epoch": 0.07435833952298423, + "grad_norm": 2.6200008012335205, + "learning_rate": 2.984573539290702e-06, + "loss": 0.5934, + "step": 1802 + }, + { + "epoch": 0.07439960386234216, + "grad_norm": 2.740397804401028, + "learning_rate": 2.9845448482580973e-06, + "loss": 0.5776, + "step": 1803 + }, + { + "epoch": 0.0744408682017001, + "grad_norm": 2.426700170095658, + "learning_rate": 2.984516130707808e-06, + "loss": 0.5878, + "step": 1804 + }, + { + "epoch": 0.07448213254105802, + "grad_norm": 6.830443741888196, + "learning_rate": 2.984487386640347e-06, + "loss": 0.5685, + "step": 1805 + }, + { + "epoch": 0.07452339688041594, + "grad_norm": 2.6354427077231892, + "learning_rate": 2.984458616056227e-06, + "loss": 0.6703, + "step": 1806 + }, + { + "epoch": 0.07456466121977387, + "grad_norm": 2.2185673106931967, + "learning_rate": 2.9844298189559628e-06, + "loss": 0.6199, + "step": 1807 + }, + { + "epoch": 0.0746059255591318, + "grad_norm": 2.664771634188553, + "learning_rate": 2.984400995340069e-06, + "loss": 0.5886, + "step": 1808 + }, + { + "epoch": 0.07464718989848973, + "grad_norm": 19.187391124314008, + "learning_rate": 2.98437214520906e-06, + "loss": 0.5674, + "step": 1809 + }, + { + "epoch": 0.07468845423784765, + "grad_norm": 3.4486469118803558, + "learning_rate": 2.9843432685634508e-06, + "loss": 0.5704, + "step": 1810 + }, + { + "epoch": 0.07472971857720558, + "grad_norm": 3.8630939093355674, + "learning_rate": 2.9843143654037576e-06, + "loss": 0.5795, + "step": 1811 + }, + { + "epoch": 0.0747709829165635, + "grad_norm": 4.087237720728977, + "learning_rate": 2.9842854357304966e-06, + "loss": 0.6119, + "step": 1812 + }, + { + "epoch": 
0.07481224725592144, + "grad_norm": 14.905350601303306, + "learning_rate": 2.984256479544185e-06, + "loss": 0.6481, + "step": 1813 + }, + { + "epoch": 0.07485351159527937, + "grad_norm": 3.3910535967651576, + "learning_rate": 2.9842274968453393e-06, + "loss": 0.5793, + "step": 1814 + }, + { + "epoch": 0.07489477593463728, + "grad_norm": 7.245105658326522, + "learning_rate": 2.9841984876344775e-06, + "loss": 0.602, + "step": 1815 + }, + { + "epoch": 0.07493604027399521, + "grad_norm": 2.8875742543946363, + "learning_rate": 2.984169451912118e-06, + "loss": 0.6294, + "step": 1816 + }, + { + "epoch": 0.07497730461335314, + "grad_norm": 4.065336658004683, + "learning_rate": 2.9841403896787794e-06, + "loss": 0.6161, + "step": 1817 + }, + { + "epoch": 0.07501856895271107, + "grad_norm": 2.18914118637051, + "learning_rate": 2.9841113009349802e-06, + "loss": 0.5475, + "step": 1818 + }, + { + "epoch": 0.075059833292069, + "grad_norm": 6.6853479045999915, + "learning_rate": 2.984082185681241e-06, + "loss": 0.5744, + "step": 1819 + }, + { + "epoch": 0.07510109763142692, + "grad_norm": 3.5870757950154957, + "learning_rate": 2.984053043918081e-06, + "loss": 0.6187, + "step": 1820 + }, + { + "epoch": 0.07514236197078485, + "grad_norm": 4.328053650031275, + "learning_rate": 2.9840238756460215e-06, + "loss": 0.5547, + "step": 1821 + }, + { + "epoch": 0.07518362631014278, + "grad_norm": 3.2563873771549363, + "learning_rate": 2.983994680865583e-06, + "loss": 0.5254, + "step": 1822 + }, + { + "epoch": 0.0752248906495007, + "grad_norm": 4.740532958767665, + "learning_rate": 2.9839654595772867e-06, + "loss": 0.6343, + "step": 1823 + }, + { + "epoch": 0.07526615498885862, + "grad_norm": 4.313914805263841, + "learning_rate": 2.9839362117816552e-06, + "loss": 0.5757, + "step": 1824 + }, + { + "epoch": 0.07530741932821655, + "grad_norm": 10.123855627686616, + "learning_rate": 2.983906937479211e-06, + "loss": 0.6084, + "step": 1825 + }, + { + "epoch": 0.07534868366757448, + "grad_norm": 4.996454235432176, + "learning_rate": 2.9838776366704762e-06, + "loss": 0.5488, + "step": 1826 + }, + { + "epoch": 0.07538994800693241, + "grad_norm": 2.814136142815788, + "learning_rate": 2.9838483093559753e-06, + "loss": 0.5957, + "step": 1827 + }, + { + "epoch": 0.07543121234629034, + "grad_norm": 4.016242696368831, + "learning_rate": 2.983818955536231e-06, + "loss": 0.6047, + "step": 1828 + }, + { + "epoch": 0.07547247668564826, + "grad_norm": 3.7858413043864747, + "learning_rate": 2.983789575211769e-06, + "loss": 0.5927, + "step": 1829 + }, + { + "epoch": 0.07551374102500619, + "grad_norm": 7.608857638581101, + "learning_rate": 2.9837601683831126e-06, + "loss": 0.6042, + "step": 1830 + }, + { + "epoch": 0.07555500536436412, + "grad_norm": 3.181910205545375, + "learning_rate": 2.9837307350507882e-06, + "loss": 0.5255, + "step": 1831 + }, + { + "epoch": 0.07559626970372205, + "grad_norm": 8.378578895688863, + "learning_rate": 2.983701275215321e-06, + "loss": 0.5618, + "step": 1832 + }, + { + "epoch": 0.07563753404307998, + "grad_norm": 5.557948956022938, + "learning_rate": 2.9836717888772374e-06, + "loss": 0.5718, + "step": 1833 + }, + { + "epoch": 0.07567879838243789, + "grad_norm": 3.981471869670329, + "learning_rate": 2.9836422760370638e-06, + "loss": 0.634, + "step": 1834 + }, + { + "epoch": 0.07572006272179582, + "grad_norm": 2.605435707831904, + "learning_rate": 2.983612736695328e-06, + "loss": 0.6174, + "step": 1835 + }, + { + "epoch": 0.07576132706115375, + "grad_norm": 3.786242914080626, + "learning_rate": 
2.983583170852557e-06, + "loss": 0.559, + "step": 1836 + }, + { + "epoch": 0.07580259140051168, + "grad_norm": 9.419111837278114, + "learning_rate": 2.983553578509279e-06, + "loss": 0.5479, + "step": 1837 + }, + { + "epoch": 0.0758438557398696, + "grad_norm": 3.6649911947067477, + "learning_rate": 2.983523959666024e-06, + "loss": 0.5921, + "step": 1838 + }, + { + "epoch": 0.07588512007922753, + "grad_norm": 4.023257472102841, + "learning_rate": 2.9834943143233186e-06, + "loss": 0.5933, + "step": 1839 + }, + { + "epoch": 0.07592638441858546, + "grad_norm": 2.8164981754051652, + "learning_rate": 2.983464642481694e-06, + "loss": 0.6329, + "step": 1840 + }, + { + "epoch": 0.07596764875794339, + "grad_norm": 3.4135952710846036, + "learning_rate": 2.98343494414168e-06, + "loss": 0.5676, + "step": 1841 + }, + { + "epoch": 0.07600891309730132, + "grad_norm": 66.32567874938987, + "learning_rate": 2.9834052193038067e-06, + "loss": 0.5805, + "step": 1842 + }, + { + "epoch": 0.07605017743665923, + "grad_norm": 3.1983913752777897, + "learning_rate": 2.983375467968605e-06, + "loss": 0.5857, + "step": 1843 + }, + { + "epoch": 0.07609144177601716, + "grad_norm": 3.4620182190492335, + "learning_rate": 2.9833456901366076e-06, + "loss": 0.538, + "step": 1844 + }, + { + "epoch": 0.07613270611537509, + "grad_norm": 4.463672718074359, + "learning_rate": 2.9833158858083445e-06, + "loss": 0.6027, + "step": 1845 + }, + { + "epoch": 0.07617397045473302, + "grad_norm": 12.378413884112605, + "learning_rate": 2.983286054984349e-06, + "loss": 0.6057, + "step": 1846 + }, + { + "epoch": 0.07621523479409095, + "grad_norm": 3.154618989762676, + "learning_rate": 2.9832561976651548e-06, + "loss": 0.5095, + "step": 1847 + }, + { + "epoch": 0.07625649913344887, + "grad_norm": 2.897132735234882, + "learning_rate": 2.9832263138512932e-06, + "loss": 0.5931, + "step": 1848 + }, + { + "epoch": 0.0762977634728068, + "grad_norm": 9.388333313416636, + "learning_rate": 2.9831964035433003e-06, + "loss": 0.6372, + "step": 1849 + }, + { + "epoch": 0.07633902781216473, + "grad_norm": 2.475303547009766, + "learning_rate": 2.9831664667417084e-06, + "loss": 0.534, + "step": 1850 + }, + { + "epoch": 0.07638029215152266, + "grad_norm": 2.9673059185203776, + "learning_rate": 2.9831365034470532e-06, + "loss": 0.5659, + "step": 1851 + }, + { + "epoch": 0.07642155649088059, + "grad_norm": 7.852361550063281, + "learning_rate": 2.98310651365987e-06, + "loss": 0.5989, + "step": 1852 + }, + { + "epoch": 0.0764628208302385, + "grad_norm": 4.457745835435415, + "learning_rate": 2.9830764973806944e-06, + "loss": 0.6007, + "step": 1853 + }, + { + "epoch": 0.07650408516959643, + "grad_norm": 2.0800613242360533, + "learning_rate": 2.9830464546100626e-06, + "loss": 0.4951, + "step": 1854 + }, + { + "epoch": 0.07654534950895436, + "grad_norm": 4.967880118562482, + "learning_rate": 2.9830163853485105e-06, + "loss": 0.6338, + "step": 1855 + }, + { + "epoch": 0.0765866138483123, + "grad_norm": 3.3566561641412944, + "learning_rate": 2.9829862895965765e-06, + "loss": 0.5662, + "step": 1856 + }, + { + "epoch": 0.07662787818767021, + "grad_norm": 2.8003324049396503, + "learning_rate": 2.982956167354797e-06, + "loss": 0.5375, + "step": 1857 + }, + { + "epoch": 0.07666914252702814, + "grad_norm": 5.114863382525335, + "learning_rate": 2.982926018623711e-06, + "loss": 0.5512, + "step": 1858 + }, + { + "epoch": 0.07671040686638607, + "grad_norm": 156.30143734453236, + "learning_rate": 2.982895843403856e-06, + "loss": 0.6069, + "step": 1859 + }, + { + "epoch": 
0.076751671205744, + "grad_norm": 5.096538806433927, + "learning_rate": 2.9828656416957724e-06, + "loss": 0.6081, + "step": 1860 + }, + { + "epoch": 0.07679293554510193, + "grad_norm": 3.6095447345864415, + "learning_rate": 2.982835413499998e-06, + "loss": 0.6352, + "step": 1861 + }, + { + "epoch": 0.07683419988445984, + "grad_norm": 2.7481936834569676, + "learning_rate": 2.982805158817074e-06, + "loss": 0.6316, + "step": 1862 + }, + { + "epoch": 0.07687546422381777, + "grad_norm": 2.2545240877144765, + "learning_rate": 2.982774877647541e-06, + "loss": 0.5941, + "step": 1863 + }, + { + "epoch": 0.0769167285631757, + "grad_norm": 4.381973119476549, + "learning_rate": 2.9827445699919383e-06, + "loss": 0.6745, + "step": 1864 + }, + { + "epoch": 0.07695799290253363, + "grad_norm": 3.886670988019235, + "learning_rate": 2.9827142358508094e-06, + "loss": 0.5816, + "step": 1865 + }, + { + "epoch": 0.07699925724189156, + "grad_norm": 2.267185781440775, + "learning_rate": 2.9826838752246944e-06, + "loss": 0.5928, + "step": 1866 + }, + { + "epoch": 0.07704052158124948, + "grad_norm": 2.2646331586441284, + "learning_rate": 2.9826534881141365e-06, + "loss": 0.5859, + "step": 1867 + }, + { + "epoch": 0.07708178592060741, + "grad_norm": 4.18553211129377, + "learning_rate": 2.982623074519678e-06, + "loss": 0.5969, + "step": 1868 + }, + { + "epoch": 0.07712305025996534, + "grad_norm": 3.082200089475153, + "learning_rate": 2.9825926344418626e-06, + "loss": 0.6102, + "step": 1869 + }, + { + "epoch": 0.07716431459932327, + "grad_norm": 5.902435272113264, + "learning_rate": 2.9825621678812337e-06, + "loss": 0.6247, + "step": 1870 + }, + { + "epoch": 0.07720557893868119, + "grad_norm": 2.798415635372394, + "learning_rate": 2.982531674838336e-06, + "loss": 0.606, + "step": 1871 + }, + { + "epoch": 0.07724684327803912, + "grad_norm": 24.335341836970358, + "learning_rate": 2.982501155313714e-06, + "loss": 0.519, + "step": 1872 + }, + { + "epoch": 0.07728810761739704, + "grad_norm": 2.9639607773435515, + "learning_rate": 2.9824706093079124e-06, + "loss": 0.6444, + "step": 1873 + }, + { + "epoch": 0.07732937195675497, + "grad_norm": 3.4528505521736914, + "learning_rate": 2.982440036821477e-06, + "loss": 0.594, + "step": 1874 + }, + { + "epoch": 0.0773706362961129, + "grad_norm": 2.232508752996571, + "learning_rate": 2.9824094378549542e-06, + "loss": 0.5109, + "step": 1875 + }, + { + "epoch": 0.07741190063547082, + "grad_norm": 6.054401891728994, + "learning_rate": 2.9823788124088906e-06, + "loss": 0.6084, + "step": 1876 + }, + { + "epoch": 0.07745316497482875, + "grad_norm": 3.77651587286979, + "learning_rate": 2.982348160483833e-06, + "loss": 0.5588, + "step": 1877 + }, + { + "epoch": 0.07749442931418668, + "grad_norm": 3.7855694091207877, + "learning_rate": 2.9823174820803287e-06, + "loss": 0.5927, + "step": 1878 + }, + { + "epoch": 0.07753569365354461, + "grad_norm": 3.678311395649849, + "learning_rate": 2.9822867771989265e-06, + "loss": 0.5599, + "step": 1879 + }, + { + "epoch": 0.07757695799290254, + "grad_norm": 3.2423409299781967, + "learning_rate": 2.982256045840174e-06, + "loss": 0.5919, + "step": 1880 + }, + { + "epoch": 0.07761822233226046, + "grad_norm": 2.7311807353272557, + "learning_rate": 2.9822252880046204e-06, + "loss": 0.5692, + "step": 1881 + }, + { + "epoch": 0.07765948667161839, + "grad_norm": 3.8069780278215823, + "learning_rate": 2.9821945036928154e-06, + "loss": 0.6032, + "step": 1882 + }, + { + "epoch": 0.07770075101097632, + "grad_norm": 4.074897034163524, + "learning_rate": 
2.982163692905309e-06, + "loss": 0.5383, + "step": 1883 + }, + { + "epoch": 0.07774201535033425, + "grad_norm": 2.6550578927845465, + "learning_rate": 2.9821328556426507e-06, + "loss": 0.5969, + "step": 1884 + }, + { + "epoch": 0.07778327968969216, + "grad_norm": 2.5600390513714886, + "learning_rate": 2.982101991905392e-06, + "loss": 0.6115, + "step": 1885 + }, + { + "epoch": 0.07782454402905009, + "grad_norm": 4.875002069384092, + "learning_rate": 2.9820711016940837e-06, + "loss": 0.5642, + "step": 1886 + }, + { + "epoch": 0.07786580836840802, + "grad_norm": 7.6428535340795385, + "learning_rate": 2.9820401850092784e-06, + "loss": 0.6002, + "step": 1887 + }, + { + "epoch": 0.07790707270776595, + "grad_norm": 2.850717250740569, + "learning_rate": 2.982009241851528e-06, + "loss": 0.5828, + "step": 1888 + }, + { + "epoch": 0.07794833704712388, + "grad_norm": 20.30523868921307, + "learning_rate": 2.981978272221385e-06, + "loss": 0.5985, + "step": 1889 + }, + { + "epoch": 0.0779896013864818, + "grad_norm": 4.03764308770247, + "learning_rate": 2.9819472761194026e-06, + "loss": 0.6181, + "step": 1890 + }, + { + "epoch": 0.07803086572583973, + "grad_norm": 2.190054388238632, + "learning_rate": 2.981916253546135e-06, + "loss": 0.5745, + "step": 1891 + }, + { + "epoch": 0.07807213006519766, + "grad_norm": 2.6238313654805285, + "learning_rate": 2.981885204502135e-06, + "loss": 0.5347, + "step": 1892 + }, + { + "epoch": 0.07811339440455559, + "grad_norm": 2.5403555685083976, + "learning_rate": 2.981854128987959e-06, + "loss": 0.6309, + "step": 1893 + }, + { + "epoch": 0.07815465874391352, + "grad_norm": 2.598271484107276, + "learning_rate": 2.981823027004161e-06, + "loss": 0.6255, + "step": 1894 + }, + { + "epoch": 0.07819592308327143, + "grad_norm": 7.733358623974244, + "learning_rate": 2.981791898551297e-06, + "loss": 0.5813, + "step": 1895 + }, + { + "epoch": 0.07823718742262936, + "grad_norm": 3.8964717458843086, + "learning_rate": 2.9817607436299223e-06, + "loss": 0.5334, + "step": 1896 + }, + { + "epoch": 0.07827845176198729, + "grad_norm": 3.9080408282668273, + "learning_rate": 2.9817295622405943e-06, + "loss": 0.6458, + "step": 1897 + }, + { + "epoch": 0.07831971610134522, + "grad_norm": 11.938466796596783, + "learning_rate": 2.9816983543838693e-06, + "loss": 0.593, + "step": 1898 + }, + { + "epoch": 0.07836098044070315, + "grad_norm": 20.023057365006597, + "learning_rate": 2.9816671200603053e-06, + "loss": 0.5691, + "step": 1899 + }, + { + "epoch": 0.07840224478006107, + "grad_norm": 2.876302296323296, + "learning_rate": 2.98163585927046e-06, + "loss": 0.6077, + "step": 1900 + }, + { + "epoch": 0.078443509119419, + "grad_norm": 12.748457807050817, + "learning_rate": 2.9816045720148913e-06, + "loss": 0.5409, + "step": 1901 + }, + { + "epoch": 0.07848477345877693, + "grad_norm": 5.513148701122761, + "learning_rate": 2.981573258294159e-06, + "loss": 0.5744, + "step": 1902 + }, + { + "epoch": 0.07852603779813486, + "grad_norm": 2.8257974305697737, + "learning_rate": 2.981541918108822e-06, + "loss": 0.559, + "step": 1903 + }, + { + "epoch": 0.07856730213749277, + "grad_norm": 2.2216339716085343, + "learning_rate": 2.98151055145944e-06, + "loss": 0.5468, + "step": 1904 + }, + { + "epoch": 0.0786085664768507, + "grad_norm": 7.617646950765789, + "learning_rate": 2.9814791583465734e-06, + "loss": 0.6461, + "step": 1905 + }, + { + "epoch": 0.07864983081620863, + "grad_norm": 4.013063710320891, + "learning_rate": 2.9814477387707827e-06, + "loss": 0.5855, + "step": 1906 + }, + { + "epoch": 
0.07869109515556656, + "grad_norm": 4.246812964546333, + "learning_rate": 2.98141629273263e-06, + "loss": 0.5749, + "step": 1907 + }, + { + "epoch": 0.07873235949492449, + "grad_norm": 3.1091571288342634, + "learning_rate": 2.9813848202326757e-06, + "loss": 0.618, + "step": 1908 + }, + { + "epoch": 0.07877362383428241, + "grad_norm": 7.024685779647311, + "learning_rate": 2.981353321271483e-06, + "loss": 0.6163, + "step": 1909 + }, + { + "epoch": 0.07881488817364034, + "grad_norm": 2.7452239526905413, + "learning_rate": 2.981321795849614e-06, + "loss": 0.6236, + "step": 1910 + }, + { + "epoch": 0.07885615251299827, + "grad_norm": 4.073717242159453, + "learning_rate": 2.9812902439676322e-06, + "loss": 0.6465, + "step": 1911 + }, + { + "epoch": 0.0788974168523562, + "grad_norm": 3.134422569746342, + "learning_rate": 2.981258665626101e-06, + "loss": 0.5753, + "step": 1912 + }, + { + "epoch": 0.07893868119171413, + "grad_norm": 7.584470114027728, + "learning_rate": 2.9812270608255842e-06, + "loss": 0.6216, + "step": 1913 + }, + { + "epoch": 0.07897994553107204, + "grad_norm": 2.6268303342189516, + "learning_rate": 2.981195429566647e-06, + "loss": 0.5347, + "step": 1914 + }, + { + "epoch": 0.07902120987042997, + "grad_norm": 9.58785273977617, + "learning_rate": 2.9811637718498535e-06, + "loss": 0.5656, + "step": 1915 + }, + { + "epoch": 0.0790624742097879, + "grad_norm": 2.3286214817364614, + "learning_rate": 2.9811320876757707e-06, + "loss": 0.6005, + "step": 1916 + }, + { + "epoch": 0.07910373854914583, + "grad_norm": 6.417761497980284, + "learning_rate": 2.9811003770449628e-06, + "loss": 0.5727, + "step": 1917 + }, + { + "epoch": 0.07914500288850375, + "grad_norm": 6.3854339754147045, + "learning_rate": 2.9810686399579972e-06, + "loss": 0.5856, + "step": 1918 + }, + { + "epoch": 0.07918626722786168, + "grad_norm": 3.843853937758381, + "learning_rate": 2.9810368764154407e-06, + "loss": 0.6188, + "step": 1919 + }, + { + "epoch": 0.07922753156721961, + "grad_norm": 4.821459302560812, + "learning_rate": 2.9810050864178604e-06, + "loss": 0.6407, + "step": 1920 + }, + { + "epoch": 0.07926879590657754, + "grad_norm": 2.617090975970566, + "learning_rate": 2.980973269965825e-06, + "loss": 0.593, + "step": 1921 + }, + { + "epoch": 0.07931006024593547, + "grad_norm": 2.3540526319791564, + "learning_rate": 2.9809414270599015e-06, + "loss": 0.5955, + "step": 1922 + }, + { + "epoch": 0.07935132458529338, + "grad_norm": 3.7258872048370444, + "learning_rate": 2.9809095577006593e-06, + "loss": 0.5826, + "step": 1923 + }, + { + "epoch": 0.07939258892465131, + "grad_norm": 2.3788364137353684, + "learning_rate": 2.9808776618886683e-06, + "loss": 0.5823, + "step": 1924 + }, + { + "epoch": 0.07943385326400924, + "grad_norm": 5.64157303505045, + "learning_rate": 2.980845739624497e-06, + "loss": 0.6198, + "step": 1925 + }, + { + "epoch": 0.07947511760336717, + "grad_norm": 3.215194849034821, + "learning_rate": 2.980813790908717e-06, + "loss": 0.5776, + "step": 1926 + }, + { + "epoch": 0.0795163819427251, + "grad_norm": 2.9516630946770754, + "learning_rate": 2.9807818157418976e-06, + "loss": 0.5923, + "step": 1927 + }, + { + "epoch": 0.07955764628208302, + "grad_norm": 5.034812614228846, + "learning_rate": 2.980749814124611e-06, + "loss": 0.5979, + "step": 1928 + }, + { + "epoch": 0.07959891062144095, + "grad_norm": 5.393056287489901, + "learning_rate": 2.9807177860574285e-06, + "loss": 0.6089, + "step": 1929 + }, + { + "epoch": 0.07964017496079888, + "grad_norm": 2.386548910130646, + "learning_rate": 
2.980685731540922e-06, + "loss": 0.6241, + "step": 1930 + }, + { + "epoch": 0.07968143930015681, + "grad_norm": 3.645330632013277, + "learning_rate": 2.9806536505756642e-06, + "loss": 0.6309, + "step": 1931 + }, + { + "epoch": 0.07972270363951472, + "grad_norm": 5.109096388657026, + "learning_rate": 2.980621543162228e-06, + "loss": 0.5985, + "step": 1932 + }, + { + "epoch": 0.07976396797887265, + "grad_norm": 8.240336397645107, + "learning_rate": 2.9805894093011875e-06, + "loss": 0.5353, + "step": 1933 + }, + { + "epoch": 0.07980523231823058, + "grad_norm": 2.6737745411751024, + "learning_rate": 2.9805572489931163e-06, + "loss": 0.5885, + "step": 1934 + }, + { + "epoch": 0.07984649665758851, + "grad_norm": 5.856238222856473, + "learning_rate": 2.9805250622385886e-06, + "loss": 0.5913, + "step": 1935 + }, + { + "epoch": 0.07988776099694644, + "grad_norm": 3.0276846937751833, + "learning_rate": 2.9804928490381795e-06, + "loss": 0.6417, + "step": 1936 + }, + { + "epoch": 0.07992902533630436, + "grad_norm": 7.153720485808467, + "learning_rate": 2.9804606093924645e-06, + "loss": 0.5582, + "step": 1937 + }, + { + "epoch": 0.07997028967566229, + "grad_norm": 3.7158852728584684, + "learning_rate": 2.9804283433020196e-06, + "loss": 0.5973, + "step": 1938 + }, + { + "epoch": 0.08001155401502022, + "grad_norm": 6.185256447517217, + "learning_rate": 2.9803960507674213e-06, + "loss": 0.6148, + "step": 1939 + }, + { + "epoch": 0.08005281835437815, + "grad_norm": 3.807496615532927, + "learning_rate": 2.9803637317892455e-06, + "loss": 0.5655, + "step": 1940 + }, + { + "epoch": 0.08009408269373608, + "grad_norm": 6.297077360473792, + "learning_rate": 2.9803313863680708e-06, + "loss": 0.5879, + "step": 1941 + }, + { + "epoch": 0.080135347033094, + "grad_norm": 3.3664094602139625, + "learning_rate": 2.980299014504474e-06, + "loss": 0.5685, + "step": 1942 + }, + { + "epoch": 0.08017661137245193, + "grad_norm": 2.7779850951275304, + "learning_rate": 2.9802666161990338e-06, + "loss": 0.5856, + "step": 1943 + }, + { + "epoch": 0.08021787571180986, + "grad_norm": 2.6157474354584154, + "learning_rate": 2.980234191452328e-06, + "loss": 0.6407, + "step": 1944 + }, + { + "epoch": 0.08025914005116778, + "grad_norm": 3.7248424829039664, + "learning_rate": 2.9802017402649377e-06, + "loss": 0.5852, + "step": 1945 + }, + { + "epoch": 0.0803004043905257, + "grad_norm": 3.9637689954649096, + "learning_rate": 2.980169262637441e-06, + "loss": 0.5398, + "step": 1946 + }, + { + "epoch": 0.08034166872988363, + "grad_norm": 3.5850067524570015, + "learning_rate": 2.9801367585704185e-06, + "loss": 0.5975, + "step": 1947 + }, + { + "epoch": 0.08038293306924156, + "grad_norm": 3.841851521426193, + "learning_rate": 2.980104228064451e-06, + "loss": 0.614, + "step": 1948 + }, + { + "epoch": 0.08042419740859949, + "grad_norm": 3.6282181839957883, + "learning_rate": 2.9800716711201185e-06, + "loss": 0.5672, + "step": 1949 + }, + { + "epoch": 0.08046546174795742, + "grad_norm": 3.2097873571781608, + "learning_rate": 2.980039087738004e-06, + "loss": 0.5737, + "step": 1950 + }, + { + "epoch": 0.08050672608731534, + "grad_norm": 3.780442911358458, + "learning_rate": 2.980006477918689e-06, + "loss": 0.6711, + "step": 1951 + }, + { + "epoch": 0.08054799042667327, + "grad_norm": 3.9403969778156, + "learning_rate": 2.979973841662755e-06, + "loss": 0.5858, + "step": 1952 + }, + { + "epoch": 0.0805892547660312, + "grad_norm": 3.534174071828002, + "learning_rate": 2.9799411789707867e-06, + "loss": 0.6457, + "step": 1953 + }, + { + "epoch": 
0.08063051910538913, + "grad_norm": 2.1584413821314623, + "learning_rate": 2.979908489843367e-06, + "loss": 0.5891, + "step": 1954 + }, + { + "epoch": 0.08067178344474706, + "grad_norm": 6.5121304318276545, + "learning_rate": 2.9798757742810786e-06, + "loss": 0.5889, + "step": 1955 + }, + { + "epoch": 0.08071304778410497, + "grad_norm": 5.232864963628358, + "learning_rate": 2.9798430322845074e-06, + "loss": 0.5475, + "step": 1956 + }, + { + "epoch": 0.0807543121234629, + "grad_norm": 3.2196259348956042, + "learning_rate": 2.9798102638542377e-06, + "loss": 0.5878, + "step": 1957 + }, + { + "epoch": 0.08079557646282083, + "grad_norm": 3.5226709280885316, + "learning_rate": 2.9797774689908547e-06, + "loss": 0.5704, + "step": 1958 + }, + { + "epoch": 0.08083684080217876, + "grad_norm": 5.600223450176424, + "learning_rate": 2.979744647694944e-06, + "loss": 0.4975, + "step": 1959 + }, + { + "epoch": 0.08087810514153669, + "grad_norm": 3.574900066920796, + "learning_rate": 2.979711799967092e-06, + "loss": 0.5636, + "step": 1960 + }, + { + "epoch": 0.0809193694808946, + "grad_norm": 3.366451324109844, + "learning_rate": 2.9796789258078858e-06, + "loss": 0.5672, + "step": 1961 + }, + { + "epoch": 0.08096063382025254, + "grad_norm": 2.760250921313126, + "learning_rate": 2.9796460252179126e-06, + "loss": 0.5862, + "step": 1962 + }, + { + "epoch": 0.08100189815961047, + "grad_norm": 3.0297900959787327, + "learning_rate": 2.9796130981977594e-06, + "loss": 0.5139, + "step": 1963 + }, + { + "epoch": 0.0810431624989684, + "grad_norm": 2.2365149844541294, + "learning_rate": 2.979580144748015e-06, + "loss": 0.581, + "step": 1964 + }, + { + "epoch": 0.08108442683832631, + "grad_norm": 6.0094587440282, + "learning_rate": 2.9795471648692684e-06, + "loss": 0.5789, + "step": 1965 + }, + { + "epoch": 0.08112569117768424, + "grad_norm": 2.471827793963785, + "learning_rate": 2.979514158562108e-06, + "loss": 0.5556, + "step": 1966 + }, + { + "epoch": 0.08116695551704217, + "grad_norm": 3.451544321362737, + "learning_rate": 2.979481125827123e-06, + "loss": 0.5876, + "step": 1967 + }, + { + "epoch": 0.0812082198564001, + "grad_norm": 3.1131197009735523, + "learning_rate": 2.9794480666649047e-06, + "loss": 0.5327, + "step": 1968 + }, + { + "epoch": 0.08124948419575803, + "grad_norm": 3.3070942359269013, + "learning_rate": 2.979414981076042e-06, + "loss": 0.5958, + "step": 1969 + }, + { + "epoch": 0.08129074853511595, + "grad_norm": 2.274725653451008, + "learning_rate": 2.979381869061128e-06, + "loss": 0.6175, + "step": 1970 + }, + { + "epoch": 0.08133201287447388, + "grad_norm": 2.7084915808148717, + "learning_rate": 2.979348730620752e-06, + "loss": 0.6146, + "step": 1971 + }, + { + "epoch": 0.08137327721383181, + "grad_norm": 5.350745639188273, + "learning_rate": 2.9793155657555073e-06, + "loss": 0.6442, + "step": 1972 + }, + { + "epoch": 0.08141454155318974, + "grad_norm": 2.7556048725963174, + "learning_rate": 2.979282374465986e-06, + "loss": 0.5561, + "step": 1973 + }, + { + "epoch": 0.08145580589254767, + "grad_norm": 3.620619235959193, + "learning_rate": 2.979249156752781e-06, + "loss": 0.5841, + "step": 1974 + }, + { + "epoch": 0.08149707023190558, + "grad_norm": 5.65312476681967, + "learning_rate": 2.979215912616485e-06, + "loss": 0.5201, + "step": 1975 + }, + { + "epoch": 0.08153833457126351, + "grad_norm": 5.209232125889297, + "learning_rate": 2.9791826420576924e-06, + "loss": 0.6235, + "step": 1976 + }, + { + "epoch": 0.08157959891062144, + "grad_norm": 3.084305634349146, + "learning_rate": 
2.979149345076998e-06, + "loss": 0.6534, + "step": 1977 + }, + { + "epoch": 0.08162086324997937, + "grad_norm": 5.384635291202124, + "learning_rate": 2.979116021674996e-06, + "loss": 0.5484, + "step": 1978 + }, + { + "epoch": 0.08166212758933729, + "grad_norm": 4.671644553809277, + "learning_rate": 2.979082671852281e-06, + "loss": 0.5441, + "step": 1979 + }, + { + "epoch": 0.08170339192869522, + "grad_norm": 5.111022275856208, + "learning_rate": 2.9790492956094503e-06, + "loss": 0.5562, + "step": 1980 + }, + { + "epoch": 0.08174465626805315, + "grad_norm": 2.0735555977765983, + "learning_rate": 2.9790158929470983e-06, + "loss": 0.5833, + "step": 1981 + }, + { + "epoch": 0.08178592060741108, + "grad_norm": 3.002824486674632, + "learning_rate": 2.978982463865823e-06, + "loss": 0.5231, + "step": 1982 + }, + { + "epoch": 0.08182718494676901, + "grad_norm": 3.339289636070585, + "learning_rate": 2.978949008366221e-06, + "loss": 0.559, + "step": 1983 + }, + { + "epoch": 0.08186844928612692, + "grad_norm": 8.5909437786132, + "learning_rate": 2.9789155264488895e-06, + "loss": 0.609, + "step": 1984 + }, + { + "epoch": 0.08190971362548485, + "grad_norm": 8.788196547782274, + "learning_rate": 2.978882018114428e-06, + "loss": 0.5994, + "step": 1985 + }, + { + "epoch": 0.08195097796484278, + "grad_norm": 10.791783342903548, + "learning_rate": 2.978848483363433e-06, + "loss": 0.5364, + "step": 1986 + }, + { + "epoch": 0.08199224230420071, + "grad_norm": 4.312857418349053, + "learning_rate": 2.978814922196505e-06, + "loss": 0.6727, + "step": 1987 + }, + { + "epoch": 0.08203350664355864, + "grad_norm": 2.67394308949279, + "learning_rate": 2.9787813346142433e-06, + "loss": 0.5633, + "step": 1988 + }, + { + "epoch": 0.08207477098291656, + "grad_norm": 3.360023767203614, + "learning_rate": 2.9787477206172476e-06, + "loss": 0.5635, + "step": 1989 + }, + { + "epoch": 0.08211603532227449, + "grad_norm": 3.99484157949946, + "learning_rate": 2.978714080206118e-06, + "loss": 0.5697, + "step": 1990 + }, + { + "epoch": 0.08215729966163242, + "grad_norm": 4.141143898680708, + "learning_rate": 2.978680413381456e-06, + "loss": 0.6069, + "step": 1991 + }, + { + "epoch": 0.08219856400099035, + "grad_norm": 2.8262693410209727, + "learning_rate": 2.978646720143863e-06, + "loss": 0.6343, + "step": 1992 + }, + { + "epoch": 0.08223982834034826, + "grad_norm": 5.010319143990171, + "learning_rate": 2.97861300049394e-06, + "loss": 0.5281, + "step": 1993 + }, + { + "epoch": 0.0822810926797062, + "grad_norm": 4.634992735210345, + "learning_rate": 2.97857925443229e-06, + "loss": 0.5918, + "step": 1994 + }, + { + "epoch": 0.08232235701906412, + "grad_norm": 4.867639890376252, + "learning_rate": 2.9785454819595156e-06, + "loss": 0.5446, + "step": 1995 + }, + { + "epoch": 0.08236362135842205, + "grad_norm": 5.174868015245866, + "learning_rate": 2.9785116830762208e-06, + "loss": 0.5695, + "step": 1996 + }, + { + "epoch": 0.08240488569777998, + "grad_norm": 4.068588294879499, + "learning_rate": 2.978477857783008e-06, + "loss": 0.5893, + "step": 1997 + }, + { + "epoch": 0.0824461500371379, + "grad_norm": 3.12002481678004, + "learning_rate": 2.978444006080482e-06, + "loss": 0.5432, + "step": 1998 + }, + { + "epoch": 0.08248741437649583, + "grad_norm": 2.9990013381277065, + "learning_rate": 2.978410127969248e-06, + "loss": 0.6233, + "step": 1999 + }, + { + "epoch": 0.08252867871585376, + "grad_norm": 2.538455255808529, + "learning_rate": 2.978376223449911e-06, + "loss": 0.5431, + "step": 2000 + }, + { + "epoch": 0.08256994305521169, + 
"grad_norm": 5.403979740272791, + "learning_rate": 2.9783422925230755e-06, + "loss": 0.6524, + "step": 2001 + }, + { + "epoch": 0.08261120739456962, + "grad_norm": 2.6825705221956877, + "learning_rate": 2.9783083351893485e-06, + "loss": 0.5763, + "step": 2002 + }, + { + "epoch": 0.08265247173392753, + "grad_norm": 39.51437472595523, + "learning_rate": 2.978274351449337e-06, + "loss": 0.6176, + "step": 2003 + }, + { + "epoch": 0.08269373607328546, + "grad_norm": 3.520707811933135, + "learning_rate": 2.9782403413036476e-06, + "loss": 0.6096, + "step": 2004 + }, + { + "epoch": 0.0827350004126434, + "grad_norm": 3.657464187549355, + "learning_rate": 2.978206304752887e-06, + "loss": 0.539, + "step": 2005 + }, + { + "epoch": 0.08277626475200132, + "grad_norm": 6.535341713680766, + "learning_rate": 2.9781722417976646e-06, + "loss": 0.5966, + "step": 2006 + }, + { + "epoch": 0.08281752909135924, + "grad_norm": 9.855005298374536, + "learning_rate": 2.9781381524385876e-06, + "loss": 0.6188, + "step": 2007 + }, + { + "epoch": 0.08285879343071717, + "grad_norm": 2.58906577300751, + "learning_rate": 2.9781040366762653e-06, + "loss": 0.5505, + "step": 2008 + }, + { + "epoch": 0.0829000577700751, + "grad_norm": 4.210595879798387, + "learning_rate": 2.978069894511308e-06, + "loss": 0.6157, + "step": 2009 + }, + { + "epoch": 0.08294132210943303, + "grad_norm": 2.4295999903669934, + "learning_rate": 2.978035725944324e-06, + "loss": 0.5298, + "step": 2010 + }, + { + "epoch": 0.08298258644879096, + "grad_norm": 6.968396110940328, + "learning_rate": 2.978001530975925e-06, + "loss": 0.569, + "step": 2011 + }, + { + "epoch": 0.08302385078814888, + "grad_norm": 7.308845236288344, + "learning_rate": 2.9779673096067212e-06, + "loss": 0.5904, + "step": 2012 + }, + { + "epoch": 0.0830651151275068, + "grad_norm": 4.640137630036085, + "learning_rate": 2.9779330618373237e-06, + "loss": 0.5377, + "step": 2013 + }, + { + "epoch": 0.08310637946686474, + "grad_norm": 6.376272421469675, + "learning_rate": 2.977898787668345e-06, + "loss": 0.6025, + "step": 2014 + }, + { + "epoch": 0.08314764380622267, + "grad_norm": 3.409845024002493, + "learning_rate": 2.977864487100396e-06, + "loss": 0.54, + "step": 2015 + }, + { + "epoch": 0.0831889081455806, + "grad_norm": 3.589592253974087, + "learning_rate": 2.977830160134091e-06, + "loss": 0.5931, + "step": 2016 + }, + { + "epoch": 0.08323017248493851, + "grad_norm": 2.257639616585877, + "learning_rate": 2.977795806770042e-06, + "loss": 0.6096, + "step": 2017 + }, + { + "epoch": 0.08327143682429644, + "grad_norm": 3.6038908756547885, + "learning_rate": 2.9777614270088634e-06, + "loss": 0.5861, + "step": 2018 + }, + { + "epoch": 0.08331270116365437, + "grad_norm": 2.5018412064682054, + "learning_rate": 2.9777270208511682e-06, + "loss": 0.5962, + "step": 2019 + }, + { + "epoch": 0.0833539655030123, + "grad_norm": 3.520508483299783, + "learning_rate": 2.9776925882975725e-06, + "loss": 0.5763, + "step": 2020 + }, + { + "epoch": 0.08339522984237023, + "grad_norm": 3.828222718897785, + "learning_rate": 2.97765812934869e-06, + "loss": 0.5564, + "step": 2021 + }, + { + "epoch": 0.08343649418172815, + "grad_norm": 2.506998795287918, + "learning_rate": 2.977623644005137e-06, + "loss": 0.5695, + "step": 2022 + }, + { + "epoch": 0.08347775852108608, + "grad_norm": 2.643082502554474, + "learning_rate": 2.9775891322675295e-06, + "loss": 0.6272, + "step": 2023 + }, + { + "epoch": 0.083519022860444, + "grad_norm": 3.142958668686768, + "learning_rate": 2.977554594136483e-06, + "loss": 0.5817, + "step": 
2024 + }, + { + "epoch": 0.08356028719980194, + "grad_norm": 2.630863970268298, + "learning_rate": 2.9775200296126157e-06, + "loss": 0.5538, + "step": 2025 + }, + { + "epoch": 0.08360155153915985, + "grad_norm": 3.0891107904724, + "learning_rate": 2.977485438696544e-06, + "loss": 0.5727, + "step": 2026 + }, + { + "epoch": 0.08364281587851778, + "grad_norm": 4.421041726276854, + "learning_rate": 2.9774508213888873e-06, + "loss": 0.6448, + "step": 2027 + }, + { + "epoch": 0.08368408021787571, + "grad_norm": 2.5489634469350686, + "learning_rate": 2.977416177690262e-06, + "loss": 0.592, + "step": 2028 + }, + { + "epoch": 0.08372534455723364, + "grad_norm": 24.37429780703054, + "learning_rate": 2.9773815076012884e-06, + "loss": 0.5695, + "step": 2029 + }, + { + "epoch": 0.08376660889659157, + "grad_norm": 3.4137016607526665, + "learning_rate": 2.9773468111225847e-06, + "loss": 0.5673, + "step": 2030 + }, + { + "epoch": 0.08380787323594949, + "grad_norm": 13.60750574750359, + "learning_rate": 2.9773120882547715e-06, + "loss": 0.5805, + "step": 2031 + }, + { + "epoch": 0.08384913757530742, + "grad_norm": 4.278573702260784, + "learning_rate": 2.9772773389984685e-06, + "loss": 0.5907, + "step": 2032 + }, + { + "epoch": 0.08389040191466535, + "grad_norm": 4.073334076143105, + "learning_rate": 2.977242563354297e-06, + "loss": 0.5631, + "step": 2033 + }, + { + "epoch": 0.08393166625402328, + "grad_norm": 3.245382679406922, + "learning_rate": 2.9772077613228777e-06, + "loss": 0.5525, + "step": 2034 + }, + { + "epoch": 0.0839729305933812, + "grad_norm": 3.6056121893803024, + "learning_rate": 2.977172932904832e-06, + "loss": 0.6041, + "step": 2035 + }, + { + "epoch": 0.08401419493273912, + "grad_norm": 7.794324036436118, + "learning_rate": 2.977138078100783e-06, + "loss": 0.6181, + "step": 2036 + }, + { + "epoch": 0.08405545927209705, + "grad_norm": 6.942096740975138, + "learning_rate": 2.9771031969113524e-06, + "loss": 0.5729, + "step": 2037 + }, + { + "epoch": 0.08409672361145498, + "grad_norm": 3.4289443825581185, + "learning_rate": 2.9770682893371635e-06, + "loss": 0.5774, + "step": 2038 + }, + { + "epoch": 0.08413798795081291, + "grad_norm": 2.2600884692064116, + "learning_rate": 2.9770333553788397e-06, + "loss": 0.5427, + "step": 2039 + }, + { + "epoch": 0.08417925229017083, + "grad_norm": 3.0571488255065074, + "learning_rate": 2.976998395037006e-06, + "loss": 0.5685, + "step": 2040 + }, + { + "epoch": 0.08422051662952876, + "grad_norm": 4.600317190942253, + "learning_rate": 2.9769634083122852e-06, + "loss": 0.5845, + "step": 2041 + }, + { + "epoch": 0.08426178096888669, + "grad_norm": 3.5692104777317524, + "learning_rate": 2.976928395205304e-06, + "loss": 0.5837, + "step": 2042 + }, + { + "epoch": 0.08430304530824462, + "grad_norm": 7.0605546548975395, + "learning_rate": 2.9768933557166856e-06, + "loss": 0.602, + "step": 2043 + }, + { + "epoch": 0.08434430964760255, + "grad_norm": 6.7333246747587605, + "learning_rate": 2.9768582898470584e-06, + "loss": 0.6037, + "step": 2044 + }, + { + "epoch": 0.08438557398696046, + "grad_norm": 2.7258546659442096, + "learning_rate": 2.976823197597047e-06, + "loss": 0.5887, + "step": 2045 + }, + { + "epoch": 0.08442683832631839, + "grad_norm": 3.8205782792963627, + "learning_rate": 2.9767880789672796e-06, + "loss": 0.6022, + "step": 2046 + }, + { + "epoch": 0.08446810266567632, + "grad_norm": 2.8531873661545397, + "learning_rate": 2.976752933958382e-06, + "loss": 0.6222, + "step": 2047 + }, + { + "epoch": 0.08450936700503425, + "grad_norm": 2.9607303401775824, + 
"learning_rate": 2.9767177625709833e-06, + "loss": 0.6223, + "step": 2048 + }, + { + "epoch": 0.08455063134439218, + "grad_norm": 8.99534325779918, + "learning_rate": 2.976682564805711e-06, + "loss": 0.6197, + "step": 2049 + }, + { + "epoch": 0.0845918956837501, + "grad_norm": 5.428908006772345, + "learning_rate": 2.9766473406631936e-06, + "loss": 0.5964, + "step": 2050 + }, + { + "epoch": 0.08463316002310803, + "grad_norm": 8.001150145855556, + "learning_rate": 2.9766120901440613e-06, + "loss": 0.6401, + "step": 2051 + }, + { + "epoch": 0.08467442436246596, + "grad_norm": 5.244608017340538, + "learning_rate": 2.9765768132489426e-06, + "loss": 0.559, + "step": 2052 + }, + { + "epoch": 0.08471568870182389, + "grad_norm": 2.72184447893492, + "learning_rate": 2.9765415099784687e-06, + "loss": 0.6437, + "step": 2053 + }, + { + "epoch": 0.0847569530411818, + "grad_norm": 3.951155287088671, + "learning_rate": 2.976506180333269e-06, + "loss": 0.5978, + "step": 2054 + }, + { + "epoch": 0.08479821738053973, + "grad_norm": 2.4491188568938207, + "learning_rate": 2.9764708243139755e-06, + "loss": 0.6136, + "step": 2055 + }, + { + "epoch": 0.08483948171989766, + "grad_norm": 2.3019297269793872, + "learning_rate": 2.97643544192122e-06, + "loss": 0.6155, + "step": 2056 + }, + { + "epoch": 0.0848807460592556, + "grad_norm": 4.291420399816688, + "learning_rate": 2.9764000331556334e-06, + "loss": 0.5509, + "step": 2057 + }, + { + "epoch": 0.08492201039861352, + "grad_norm": 2.1367934193144973, + "learning_rate": 2.9763645980178496e-06, + "loss": 0.5598, + "step": 2058 + }, + { + "epoch": 0.08496327473797144, + "grad_norm": 2.109957570867106, + "learning_rate": 2.9763291365084997e-06, + "loss": 0.6095, + "step": 2059 + }, + { + "epoch": 0.08500453907732937, + "grad_norm": 5.566616966118438, + "learning_rate": 2.9762936486282194e-06, + "loss": 0.5427, + "step": 2060 + }, + { + "epoch": 0.0850458034166873, + "grad_norm": 5.709473333182729, + "learning_rate": 2.9762581343776405e-06, + "loss": 0.6107, + "step": 2061 + }, + { + "epoch": 0.08508706775604523, + "grad_norm": 2.6060523289236586, + "learning_rate": 2.976222593757399e-06, + "loss": 0.5931, + "step": 2062 + }, + { + "epoch": 0.08512833209540316, + "grad_norm": 3.278028325376073, + "learning_rate": 2.9761870267681286e-06, + "loss": 0.578, + "step": 2063 + }, + { + "epoch": 0.08516959643476107, + "grad_norm": 7.8846792661894725, + "learning_rate": 2.976151433410465e-06, + "loss": 0.5671, + "step": 2064 + }, + { + "epoch": 0.085210860774119, + "grad_norm": 4.022408070736632, + "learning_rate": 2.976115813685044e-06, + "loss": 0.6109, + "step": 2065 + }, + { + "epoch": 0.08525212511347693, + "grad_norm": 6.594977060394223, + "learning_rate": 2.976080167592502e-06, + "loss": 0.5397, + "step": 2066 + }, + { + "epoch": 0.08529338945283486, + "grad_norm": 4.2328406399996465, + "learning_rate": 2.9760444951334756e-06, + "loss": 0.5635, + "step": 2067 + }, + { + "epoch": 0.0853346537921928, + "grad_norm": 3.414068009285473, + "learning_rate": 2.976008796308602e-06, + "loss": 0.612, + "step": 2068 + }, + { + "epoch": 0.08537591813155071, + "grad_norm": 3.7538347915893095, + "learning_rate": 2.975973071118519e-06, + "loss": 0.6125, + "step": 2069 + }, + { + "epoch": 0.08541718247090864, + "grad_norm": 4.649285653182026, + "learning_rate": 2.9759373195638645e-06, + "loss": 0.5948, + "step": 2070 + }, + { + "epoch": 0.08545844681026657, + "grad_norm": 5.975968710598369, + "learning_rate": 2.9759015416452775e-06, + "loss": 0.5995, + "step": 2071 + }, + { + "epoch": 
0.0854997111496245, + "grad_norm": 3.0605451261161196, + "learning_rate": 2.975865737363397e-06, + "loss": 0.6108, + "step": 2072 + }, + { + "epoch": 0.08554097548898242, + "grad_norm": 2.7600340156951555, + "learning_rate": 2.9758299067188623e-06, + "loss": 0.5693, + "step": 2073 + }, + { + "epoch": 0.08558223982834035, + "grad_norm": 2.506439314861612, + "learning_rate": 2.9757940497123126e-06, + "loss": 0.5047, + "step": 2074 + }, + { + "epoch": 0.08562350416769827, + "grad_norm": 4.048779668723703, + "learning_rate": 2.97575816634439e-06, + "loss": 0.5455, + "step": 2075 + }, + { + "epoch": 0.0856647685070562, + "grad_norm": 3.637582681934616, + "learning_rate": 2.9757222566157354e-06, + "loss": 0.5719, + "step": 2076 + }, + { + "epoch": 0.08570603284641413, + "grad_norm": 7.844563877672757, + "learning_rate": 2.9756863205269884e-06, + "loss": 0.6084, + "step": 2077 + }, + { + "epoch": 0.08574729718577205, + "grad_norm": 3.677033390153644, + "learning_rate": 2.9756503580787926e-06, + "loss": 0.5577, + "step": 2078 + }, + { + "epoch": 0.08578856152512998, + "grad_norm": 6.223330283942195, + "learning_rate": 2.97561436927179e-06, + "loss": 0.5755, + "step": 2079 + }, + { + "epoch": 0.08582982586448791, + "grad_norm": 5.288800619993061, + "learning_rate": 2.9755783541066232e-06, + "loss": 0.6194, + "step": 2080 + }, + { + "epoch": 0.08587109020384584, + "grad_norm": 2.5980774427884232, + "learning_rate": 2.975542312583936e-06, + "loss": 0.5481, + "step": 2081 + }, + { + "epoch": 0.08591235454320377, + "grad_norm": 6.12521882190315, + "learning_rate": 2.9755062447043713e-06, + "loss": 0.575, + "step": 2082 + }, + { + "epoch": 0.08595361888256169, + "grad_norm": 3.0993048957463856, + "learning_rate": 2.975470150468574e-06, + "loss": 0.5604, + "step": 2083 + }, + { + "epoch": 0.08599488322191962, + "grad_norm": 2.7094062504849523, + "learning_rate": 2.9754340298771895e-06, + "loss": 0.5949, + "step": 2084 + }, + { + "epoch": 0.08603614756127755, + "grad_norm": 11.76188627222689, + "learning_rate": 2.9753978829308613e-06, + "loss": 0.6181, + "step": 2085 + }, + { + "epoch": 0.08607741190063548, + "grad_norm": 10.080810980710115, + "learning_rate": 2.9753617096302364e-06, + "loss": 0.5731, + "step": 2086 + }, + { + "epoch": 0.08611867623999339, + "grad_norm": 17.48972357263322, + "learning_rate": 2.975325509975961e-06, + "loss": 0.6174, + "step": 2087 + }, + { + "epoch": 0.08615994057935132, + "grad_norm": 33.47516374753467, + "learning_rate": 2.9752892839686808e-06, + "loss": 0.5694, + "step": 2088 + }, + { + "epoch": 0.08620120491870925, + "grad_norm": 5.432146862505166, + "learning_rate": 2.9752530316090435e-06, + "loss": 0.5625, + "step": 2089 + }, + { + "epoch": 0.08624246925806718, + "grad_norm": 4.963878258576949, + "learning_rate": 2.9752167528976966e-06, + "loss": 0.6257, + "step": 2090 + }, + { + "epoch": 0.08628373359742511, + "grad_norm": 4.262285907003392, + "learning_rate": 2.9751804478352882e-06, + "loss": 0.5421, + "step": 2091 + }, + { + "epoch": 0.08632499793678303, + "grad_norm": 5.72172710863286, + "learning_rate": 2.975144116422466e-06, + "loss": 0.5599, + "step": 2092 + }, + { + "epoch": 0.08636626227614096, + "grad_norm": 2.3124760890904326, + "learning_rate": 2.9751077586598803e-06, + "loss": 0.5626, + "step": 2093 + }, + { + "epoch": 0.08640752661549889, + "grad_norm": 3.073237840868285, + "learning_rate": 2.97507137454818e-06, + "loss": 0.5831, + "step": 2094 + }, + { + "epoch": 0.08644879095485682, + "grad_norm": 2.7046149875614365, + "learning_rate": 
2.9750349640880144e-06, + "loss": 0.6195, + "step": 2095 + }, + { + "epoch": 0.08649005529421475, + "grad_norm": 2.855319275169046, + "learning_rate": 2.9749985272800343e-06, + "loss": 0.599, + "step": 2096 + }, + { + "epoch": 0.08653131963357266, + "grad_norm": 4.1635094847557275, + "learning_rate": 2.974962064124891e-06, + "loss": 0.5658, + "step": 2097 + }, + { + "epoch": 0.08657258397293059, + "grad_norm": 3.279418444115279, + "learning_rate": 2.9749255746232353e-06, + "loss": 0.5487, + "step": 2098 + }, + { + "epoch": 0.08661384831228852, + "grad_norm": 5.218140245031501, + "learning_rate": 2.974889058775719e-06, + "loss": 0.6061, + "step": 2099 + }, + { + "epoch": 0.08665511265164645, + "grad_norm": 2.6383503030276363, + "learning_rate": 2.9748525165829948e-06, + "loss": 0.6, + "step": 2100 + }, + { + "epoch": 0.08669637699100437, + "grad_norm": 5.106581661511918, + "learning_rate": 2.974815948045715e-06, + "loss": 0.5786, + "step": 2101 + }, + { + "epoch": 0.0867376413303623, + "grad_norm": 2.682395735224028, + "learning_rate": 2.974779353164533e-06, + "loss": 0.6453, + "step": 2102 + }, + { + "epoch": 0.08677890566972023, + "grad_norm": 2.776639914782237, + "learning_rate": 2.974742731940102e-06, + "loss": 0.595, + "step": 2103 + }, + { + "epoch": 0.08682017000907816, + "grad_norm": 6.890831506438373, + "learning_rate": 2.974706084373077e-06, + "loss": 0.6019, + "step": 2104 + }, + { + "epoch": 0.08686143434843609, + "grad_norm": 2.8554175413089187, + "learning_rate": 2.974669410464112e-06, + "loss": 0.6078, + "step": 2105 + }, + { + "epoch": 0.086902698687794, + "grad_norm": 4.36101400955975, + "learning_rate": 2.9746327102138624e-06, + "loss": 0.5248, + "step": 2106 + }, + { + "epoch": 0.08694396302715193, + "grad_norm": 3.857227285675246, + "learning_rate": 2.974595983622984e-06, + "loss": 0.5921, + "step": 2107 + }, + { + "epoch": 0.08698522736650986, + "grad_norm": 3.771475826202857, + "learning_rate": 2.9745592306921318e-06, + "loss": 0.6018, + "step": 2108 + }, + { + "epoch": 0.08702649170586779, + "grad_norm": 3.886245801738709, + "learning_rate": 2.9745224514219633e-06, + "loss": 0.5784, + "step": 2109 + }, + { + "epoch": 0.08706775604522572, + "grad_norm": 3.07360045744589, + "learning_rate": 2.974485645813135e-06, + "loss": 0.5514, + "step": 2110 + }, + { + "epoch": 0.08710902038458364, + "grad_norm": 5.432201706614232, + "learning_rate": 2.9744488138663044e-06, + "loss": 0.5866, + "step": 2111 + }, + { + "epoch": 0.08715028472394157, + "grad_norm": 10.916833811392012, + "learning_rate": 2.97441195558213e-06, + "loss": 0.6107, + "step": 2112 + }, + { + "epoch": 0.0871915490632995, + "grad_norm": 3.8992943005020324, + "learning_rate": 2.9743750709612687e-06, + "loss": 0.5673, + "step": 2113 + }, + { + "epoch": 0.08723281340265743, + "grad_norm": 5.4968656569040535, + "learning_rate": 2.9743381600043813e-06, + "loss": 0.5795, + "step": 2114 + }, + { + "epoch": 0.08727407774201534, + "grad_norm": 3.5313054109975397, + "learning_rate": 2.9743012227121256e-06, + "loss": 0.515, + "step": 2115 + }, + { + "epoch": 0.08731534208137327, + "grad_norm": 3.793675416355205, + "learning_rate": 2.974264259085162e-06, + "loss": 0.6125, + "step": 2116 + }, + { + "epoch": 0.0873566064207312, + "grad_norm": 4.943688456697194, + "learning_rate": 2.974227269124151e-06, + "loss": 0.5904, + "step": 2117 + }, + { + "epoch": 0.08739787076008913, + "grad_norm": 2.6988897143675787, + "learning_rate": 2.9741902528297523e-06, + "loss": 0.567, + "step": 2118 + }, + { + "epoch": 0.08743913509944706, + 
"grad_norm": 2.1761065000512523, + "learning_rate": 2.9741532102026282e-06, + "loss": 0.6055, + "step": 2119 + }, + { + "epoch": 0.08748039943880498, + "grad_norm": 2.893411235125291, + "learning_rate": 2.9741161412434403e-06, + "loss": 0.534, + "step": 2120 + }, + { + "epoch": 0.08752166377816291, + "grad_norm": 6.511598082937605, + "learning_rate": 2.9740790459528504e-06, + "loss": 0.5563, + "step": 2121 + }, + { + "epoch": 0.08756292811752084, + "grad_norm": 4.403621054174708, + "learning_rate": 2.9740419243315205e-06, + "loss": 0.6205, + "step": 2122 + }, + { + "epoch": 0.08760419245687877, + "grad_norm": 5.4003129840131905, + "learning_rate": 2.974004776380115e-06, + "loss": 0.6022, + "step": 2123 + }, + { + "epoch": 0.0876454567962367, + "grad_norm": 3.8409237487253316, + "learning_rate": 2.9739676020992962e-06, + "loss": 0.5648, + "step": 2124 + }, + { + "epoch": 0.08768672113559461, + "grad_norm": 2.9533954046683584, + "learning_rate": 2.973930401489729e-06, + "loss": 0.5568, + "step": 2125 + }, + { + "epoch": 0.08772798547495254, + "grad_norm": 7.066582042799568, + "learning_rate": 2.973893174552078e-06, + "loss": 0.5577, + "step": 2126 + }, + { + "epoch": 0.08776924981431047, + "grad_norm": 2.82304385394521, + "learning_rate": 2.9738559212870075e-06, + "loss": 0.5847, + "step": 2127 + }, + { + "epoch": 0.0878105141536684, + "grad_norm": 2.8431838036098793, + "learning_rate": 2.9738186416951836e-06, + "loss": 0.5928, + "step": 2128 + }, + { + "epoch": 0.08785177849302633, + "grad_norm": 10.440348121623861, + "learning_rate": 2.9737813357772713e-06, + "loss": 0.5921, + "step": 2129 + }, + { + "epoch": 0.08789304283238425, + "grad_norm": 5.420219103802202, + "learning_rate": 2.9737440035339374e-06, + "loss": 0.641, + "step": 2130 + }, + { + "epoch": 0.08793430717174218, + "grad_norm": 4.4166279548100205, + "learning_rate": 2.973706644965849e-06, + "loss": 0.554, + "step": 2131 + }, + { + "epoch": 0.08797557151110011, + "grad_norm": 4.674443234015267, + "learning_rate": 2.973669260073674e-06, + "loss": 0.6375, + "step": 2132 + }, + { + "epoch": 0.08801683585045804, + "grad_norm": 2.5037803807730574, + "learning_rate": 2.9736318488580784e-06, + "loss": 0.4967, + "step": 2133 + }, + { + "epoch": 0.08805810018981595, + "grad_norm": 2.2827146625510393, + "learning_rate": 2.9735944113197324e-06, + "loss": 0.5633, + "step": 2134 + }, + { + "epoch": 0.08809936452917388, + "grad_norm": 3.3077331465596087, + "learning_rate": 2.973556947459303e-06, + "loss": 0.542, + "step": 2135 + }, + { + "epoch": 0.08814062886853181, + "grad_norm": 2.770346160027388, + "learning_rate": 2.973519457277461e-06, + "loss": 0.6496, + "step": 2136 + }, + { + "epoch": 0.08818189320788974, + "grad_norm": 2.5847060477563897, + "learning_rate": 2.973481940774875e-06, + "loss": 0.5848, + "step": 2137 + }, + { + "epoch": 0.08822315754724767, + "grad_norm": 2.34434073230117, + "learning_rate": 2.973444397952215e-06, + "loss": 0.5294, + "step": 2138 + }, + { + "epoch": 0.08826442188660559, + "grad_norm": 7.062834150759292, + "learning_rate": 2.973406828810153e-06, + "loss": 0.6432, + "step": 2139 + }, + { + "epoch": 0.08830568622596352, + "grad_norm": 2.24052841791296, + "learning_rate": 2.9733692333493586e-06, + "loss": 0.5954, + "step": 2140 + }, + { + "epoch": 0.08834695056532145, + "grad_norm": 2.9351379596234866, + "learning_rate": 2.9733316115705038e-06, + "loss": 0.5665, + "step": 2141 + }, + { + "epoch": 0.08838821490467938, + "grad_norm": 2.5902237882262025, + "learning_rate": 2.973293963474261e-06, + "loss": 
0.588, + "step": 2142 + }, + { + "epoch": 0.08842947924403731, + "grad_norm": 4.093079886962635, + "learning_rate": 2.973256289061302e-06, + "loss": 0.6503, + "step": 2143 + }, + { + "epoch": 0.08847074358339523, + "grad_norm": 3.716017259128364, + "learning_rate": 2.9732185883323006e-06, + "loss": 0.5367, + "step": 2144 + }, + { + "epoch": 0.08851200792275316, + "grad_norm": 3.1682020932494317, + "learning_rate": 2.9731808612879295e-06, + "loss": 0.5927, + "step": 2145 + }, + { + "epoch": 0.08855327226211109, + "grad_norm": 2.6369528146208054, + "learning_rate": 2.973143107928863e-06, + "loss": 0.5234, + "step": 2146 + }, + { + "epoch": 0.08859453660146901, + "grad_norm": 4.420436863461317, + "learning_rate": 2.973105328255775e-06, + "loss": 0.6367, + "step": 2147 + }, + { + "epoch": 0.08863580094082693, + "grad_norm": 3.115421051018322, + "learning_rate": 2.9730675222693413e-06, + "loss": 0.6043, + "step": 2148 + }, + { + "epoch": 0.08867706528018486, + "grad_norm": 2.936596060314819, + "learning_rate": 2.973029689970236e-06, + "loss": 0.6363, + "step": 2149 + }, + { + "epoch": 0.08871832961954279, + "grad_norm": 3.884953781252289, + "learning_rate": 2.972991831359136e-06, + "loss": 0.6052, + "step": 2150 + }, + { + "epoch": 0.08875959395890072, + "grad_norm": 2.725903994713205, + "learning_rate": 2.972953946436717e-06, + "loss": 0.5725, + "step": 2151 + }, + { + "epoch": 0.08880085829825865, + "grad_norm": 3.6084619832247373, + "learning_rate": 2.9729160352036553e-06, + "loss": 0.5928, + "step": 2152 + }, + { + "epoch": 0.08884212263761657, + "grad_norm": 5.021331072592995, + "learning_rate": 2.972878097660629e-06, + "loss": 0.5639, + "step": 2153 + }, + { + "epoch": 0.0888833869769745, + "grad_norm": 2.7463923285611758, + "learning_rate": 2.972840133808315e-06, + "loss": 0.5352, + "step": 2154 + }, + { + "epoch": 0.08892465131633243, + "grad_norm": 4.102560909255488, + "learning_rate": 2.972802143647392e-06, + "loss": 0.5818, + "step": 2155 + }, + { + "epoch": 0.08896591565569036, + "grad_norm": 5.587128986733742, + "learning_rate": 2.9727641271785378e-06, + "loss": 0.5919, + "step": 2156 + }, + { + "epoch": 0.08900717999504829, + "grad_norm": 2.90205422376937, + "learning_rate": 2.9727260844024322e-06, + "loss": 0.5527, + "step": 2157 + }, + { + "epoch": 0.0890484443344062, + "grad_norm": 3.210099856158834, + "learning_rate": 2.9726880153197547e-06, + "loss": 0.5489, + "step": 2158 + }, + { + "epoch": 0.08908970867376413, + "grad_norm": 5.301438130466324, + "learning_rate": 2.9726499199311853e-06, + "loss": 0.6036, + "step": 2159 + }, + { + "epoch": 0.08913097301312206, + "grad_norm": 6.3920351189983675, + "learning_rate": 2.9726117982374042e-06, + "loss": 0.631, + "step": 2160 + }, + { + "epoch": 0.08917223735247999, + "grad_norm": 4.1278664068949755, + "learning_rate": 2.9725736502390925e-06, + "loss": 0.59, + "step": 2161 + }, + { + "epoch": 0.08921350169183791, + "grad_norm": 2.865839680507167, + "learning_rate": 2.9725354759369315e-06, + "loss": 0.6598, + "step": 2162 + }, + { + "epoch": 0.08925476603119584, + "grad_norm": 14.087360552247285, + "learning_rate": 2.972497275331603e-06, + "loss": 0.6208, + "step": 2163 + }, + { + "epoch": 0.08929603037055377, + "grad_norm": 2.363715284309113, + "learning_rate": 2.9724590484237897e-06, + "loss": 0.5651, + "step": 2164 + }, + { + "epoch": 0.0893372947099117, + "grad_norm": 1.957072646015211, + "learning_rate": 2.9724207952141742e-06, + "loss": 0.5753, + "step": 2165 + }, + { + "epoch": 0.08937855904926963, + "grad_norm": 
3.101898521509776, + "learning_rate": 2.97238251570344e-06, + "loss": 0.5776, + "step": 2166 + }, + { + "epoch": 0.08941982338862754, + "grad_norm": 2.424766987900437, + "learning_rate": 2.972344209892271e-06, + "loss": 0.5559, + "step": 2167 + }, + { + "epoch": 0.08946108772798547, + "grad_norm": 4.876702913368888, + "learning_rate": 2.9723058777813506e-06, + "loss": 0.5884, + "step": 2168 + }, + { + "epoch": 0.0895023520673434, + "grad_norm": 4.188847895579721, + "learning_rate": 2.9722675193713646e-06, + "loss": 0.5827, + "step": 2169 + }, + { + "epoch": 0.08954361640670133, + "grad_norm": 3.3004525732713423, + "learning_rate": 2.9722291346629974e-06, + "loss": 0.6035, + "step": 2170 + }, + { + "epoch": 0.08958488074605926, + "grad_norm": 5.74246389478962, + "learning_rate": 2.9721907236569346e-06, + "loss": 0.5563, + "step": 2171 + }, + { + "epoch": 0.08962614508541718, + "grad_norm": 6.431781350681638, + "learning_rate": 2.972152286353863e-06, + "loss": 0.6014, + "step": 2172 + }, + { + "epoch": 0.08966740942477511, + "grad_norm": 2.333868541360918, + "learning_rate": 2.972113822754469e-06, + "loss": 0.5579, + "step": 2173 + }, + { + "epoch": 0.08970867376413304, + "grad_norm": 4.376512589854172, + "learning_rate": 2.972075332859439e-06, + "loss": 0.6081, + "step": 2174 + }, + { + "epoch": 0.08974993810349097, + "grad_norm": 3.5568551071493766, + "learning_rate": 2.972036816669461e-06, + "loss": 0.5187, + "step": 2175 + }, + { + "epoch": 0.08979120244284888, + "grad_norm": 2.4846068081902297, + "learning_rate": 2.971998274185223e-06, + "loss": 0.5425, + "step": 2176 + }, + { + "epoch": 0.08983246678220681, + "grad_norm": 3.022301128970938, + "learning_rate": 2.971959705407414e-06, + "loss": 0.5664, + "step": 2177 + }, + { + "epoch": 0.08987373112156474, + "grad_norm": 3.146099923745243, + "learning_rate": 2.971921110336722e-06, + "loss": 0.6004, + "step": 2178 + }, + { + "epoch": 0.08991499546092267, + "grad_norm": 3.3175282886217126, + "learning_rate": 2.9718824889738366e-06, + "loss": 0.5753, + "step": 2179 + }, + { + "epoch": 0.0899562598002806, + "grad_norm": 5.514964079115453, + "learning_rate": 2.9718438413194485e-06, + "loss": 0.6537, + "step": 2180 + }, + { + "epoch": 0.08999752413963852, + "grad_norm": 2.363900704234513, + "learning_rate": 2.9718051673742466e-06, + "loss": 0.6378, + "step": 2181 + }, + { + "epoch": 0.09003878847899645, + "grad_norm": 3.033833729590746, + "learning_rate": 2.971766467138923e-06, + "loss": 0.5453, + "step": 2182 + }, + { + "epoch": 0.09008005281835438, + "grad_norm": 2.603989918322168, + "learning_rate": 2.9717277406141685e-06, + "loss": 0.5259, + "step": 2183 + }, + { + "epoch": 0.09012131715771231, + "grad_norm": 8.22629550345107, + "learning_rate": 2.9716889878006747e-06, + "loss": 0.5394, + "step": 2184 + }, + { + "epoch": 0.09016258149707024, + "grad_norm": 5.539128379819088, + "learning_rate": 2.971650208699134e-06, + "loss": 0.5642, + "step": 2185 + }, + { + "epoch": 0.09020384583642815, + "grad_norm": 7.157564264826067, + "learning_rate": 2.971611403310239e-06, + "loss": 0.6814, + "step": 2186 + }, + { + "epoch": 0.09024511017578608, + "grad_norm": 4.787124917553874, + "learning_rate": 2.971572571634683e-06, + "loss": 0.5013, + "step": 2187 + }, + { + "epoch": 0.09028637451514401, + "grad_norm": 2.4215622667264864, + "learning_rate": 2.9715337136731596e-06, + "loss": 0.5757, + "step": 2188 + }, + { + "epoch": 0.09032763885450194, + "grad_norm": 5.515182115888922, + "learning_rate": 2.9714948294263625e-06, + "loss": 0.5607, + "step": 2189 
+ }, + { + "epoch": 0.09036890319385987, + "grad_norm": 3.030605007337276, + "learning_rate": 2.971455918894987e-06, + "loss": 0.5849, + "step": 2190 + }, + { + "epoch": 0.09041016753321779, + "grad_norm": 4.223139944728818, + "learning_rate": 2.971416982079728e-06, + "loss": 0.6008, + "step": 2191 + }, + { + "epoch": 0.09045143187257572, + "grad_norm": 3.549621171337326, + "learning_rate": 2.97137801898128e-06, + "loss": 0.618, + "step": 2192 + }, + { + "epoch": 0.09049269621193365, + "grad_norm": 3.7594798375841822, + "learning_rate": 2.9713390296003404e-06, + "loss": 0.5555, + "step": 2193 + }, + { + "epoch": 0.09053396055129158, + "grad_norm": 3.3880372178193046, + "learning_rate": 2.971300013937605e-06, + "loss": 0.5905, + "step": 2194 + }, + { + "epoch": 0.0905752248906495, + "grad_norm": 3.8525942672772393, + "learning_rate": 2.9712609719937706e-06, + "loss": 0.5702, + "step": 2195 + }, + { + "epoch": 0.09061648923000742, + "grad_norm": 4.904197218150377, + "learning_rate": 2.971221903769535e-06, + "loss": 0.6086, + "step": 2196 + }, + { + "epoch": 0.09065775356936535, + "grad_norm": 3.30845186592929, + "learning_rate": 2.9711828092655955e-06, + "loss": 0.5849, + "step": 2197 + }, + { + "epoch": 0.09069901790872328, + "grad_norm": 1.6990542675877303, + "learning_rate": 2.9711436884826503e-06, + "loss": 0.5769, + "step": 2198 + }, + { + "epoch": 0.09074028224808121, + "grad_norm": 7.34002403247591, + "learning_rate": 2.971104541421399e-06, + "loss": 0.5195, + "step": 2199 + }, + { + "epoch": 0.09078154658743913, + "grad_norm": 5.110364505191264, + "learning_rate": 2.97106536808254e-06, + "loss": 0.5758, + "step": 2200 + }, + { + "epoch": 0.09082281092679706, + "grad_norm": 2.4519428187183263, + "learning_rate": 2.971026168466774e-06, + "loss": 0.6147, + "step": 2201 + }, + { + "epoch": 0.09086407526615499, + "grad_norm": 5.854038708337269, + "learning_rate": 2.9709869425748007e-06, + "loss": 0.6354, + "step": 2202 + }, + { + "epoch": 0.09090533960551292, + "grad_norm": 3.6097695258394533, + "learning_rate": 2.9709476904073206e-06, + "loss": 0.5611, + "step": 2203 + }, + { + "epoch": 0.09094660394487085, + "grad_norm": 8.215122016987982, + "learning_rate": 2.970908411965035e-06, + "loss": 0.579, + "step": 2204 + }, + { + "epoch": 0.09098786828422877, + "grad_norm": 7.88110401102161, + "learning_rate": 2.9708691072486457e-06, + "loss": 0.5789, + "step": 2205 + }, + { + "epoch": 0.0910291326235867, + "grad_norm": 6.438853468629428, + "learning_rate": 2.970829776258854e-06, + "loss": 0.6183, + "step": 2206 + }, + { + "epoch": 0.09107039696294462, + "grad_norm": 2.876842645512708, + "learning_rate": 2.9707904189963633e-06, + "loss": 0.5952, + "step": 2207 + }, + { + "epoch": 0.09111166130230255, + "grad_norm": 3.233264359367632, + "learning_rate": 2.970751035461877e-06, + "loss": 0.5528, + "step": 2208 + }, + { + "epoch": 0.09115292564166047, + "grad_norm": 2.3850071578917835, + "learning_rate": 2.9707116256560972e-06, + "loss": 0.612, + "step": 2209 + }, + { + "epoch": 0.0911941899810184, + "grad_norm": 5.183356344770973, + "learning_rate": 2.970672189579729e-06, + "loss": 0.5156, + "step": 2210 + }, + { + "epoch": 0.09123545432037633, + "grad_norm": 2.5608891126199196, + "learning_rate": 2.9706327272334764e-06, + "loss": 0.5685, + "step": 2211 + }, + { + "epoch": 0.09127671865973426, + "grad_norm": 1.8840869272515874, + "learning_rate": 2.970593238618044e-06, + "loss": 0.577, + "step": 2212 + }, + { + "epoch": 0.09131798299909219, + "grad_norm": 22.228887854927112, + "learning_rate": 
2.9705537237341382e-06, + "loss": 0.5468, + "step": 2213 + }, + { + "epoch": 0.0913592473384501, + "grad_norm": 3.380650742479043, + "learning_rate": 2.9705141825824632e-06, + "loss": 0.5875, + "step": 2214 + }, + { + "epoch": 0.09140051167780804, + "grad_norm": 2.969726544686636, + "learning_rate": 2.970474615163727e-06, + "loss": 0.559, + "step": 2215 + }, + { + "epoch": 0.09144177601716597, + "grad_norm": 4.419258119930163, + "learning_rate": 2.9704350214786353e-06, + "loss": 0.5472, + "step": 2216 + }, + { + "epoch": 0.0914830403565239, + "grad_norm": 3.7283412611563183, + "learning_rate": 2.9703954015278956e-06, + "loss": 0.5763, + "step": 2217 + }, + { + "epoch": 0.09152430469588183, + "grad_norm": 16.78341733341221, + "learning_rate": 2.970355755312216e-06, + "loss": 0.5568, + "step": 2218 + }, + { + "epoch": 0.09156556903523974, + "grad_norm": 2.210566629367558, + "learning_rate": 2.9703160828323037e-06, + "loss": 0.5018, + "step": 2219 + }, + { + "epoch": 0.09160683337459767, + "grad_norm": 2.3090790423496754, + "learning_rate": 2.9702763840888682e-06, + "loss": 0.5792, + "step": 2220 + }, + { + "epoch": 0.0916480977139556, + "grad_norm": 2.465010059853682, + "learning_rate": 2.970236659082619e-06, + "loss": 0.5397, + "step": 2221 + }, + { + "epoch": 0.09168936205331353, + "grad_norm": 3.495581013361753, + "learning_rate": 2.9701969078142645e-06, + "loss": 0.6014, + "step": 2222 + }, + { + "epoch": 0.09173062639267145, + "grad_norm": 4.375446710900859, + "learning_rate": 2.9701571302845155e-06, + "loss": 0.6213, + "step": 2223 + }, + { + "epoch": 0.09177189073202938, + "grad_norm": 5.046210869783301, + "learning_rate": 2.9701173264940825e-06, + "loss": 0.6015, + "step": 2224 + }, + { + "epoch": 0.0918131550713873, + "grad_norm": 8.975272177846717, + "learning_rate": 2.970077496443676e-06, + "loss": 0.6054, + "step": 2225 + }, + { + "epoch": 0.09185441941074524, + "grad_norm": 5.578195883840526, + "learning_rate": 2.9700376401340083e-06, + "loss": 0.5942, + "step": 2226 + }, + { + "epoch": 0.09189568375010317, + "grad_norm": 3.602950222595639, + "learning_rate": 2.969997757565791e-06, + "loss": 0.5551, + "step": 2227 + }, + { + "epoch": 0.09193694808946108, + "grad_norm": 4.784644143713568, + "learning_rate": 2.969957848739736e-06, + "loss": 0.579, + "step": 2228 + }, + { + "epoch": 0.09197821242881901, + "grad_norm": 2.810063972711374, + "learning_rate": 2.9699179136565566e-06, + "loss": 0.6386, + "step": 2229 + }, + { + "epoch": 0.09201947676817694, + "grad_norm": 3.0255221034352426, + "learning_rate": 2.9698779523169663e-06, + "loss": 0.5653, + "step": 2230 + }, + { + "epoch": 0.09206074110753487, + "grad_norm": 4.5199805529718, + "learning_rate": 2.9698379647216784e-06, + "loss": 0.5864, + "step": 2231 + }, + { + "epoch": 0.0921020054468928, + "grad_norm": 4.703695046110307, + "learning_rate": 2.9697979508714075e-06, + "loss": 0.6234, + "step": 2232 + }, + { + "epoch": 0.09214326978625072, + "grad_norm": 2.37539713949602, + "learning_rate": 2.969757910766869e-06, + "loss": 0.5769, + "step": 2233 + }, + { + "epoch": 0.09218453412560865, + "grad_norm": 5.333298784378068, + "learning_rate": 2.9697178444087765e-06, + "loss": 0.6399, + "step": 2234 + }, + { + "epoch": 0.09222579846496658, + "grad_norm": 2.9791172298730335, + "learning_rate": 2.969677751797847e-06, + "loss": 0.5719, + "step": 2235 + }, + { + "epoch": 0.0922670628043245, + "grad_norm": 2.798977021366789, + "learning_rate": 2.969637632934796e-06, + "loss": 0.6134, + "step": 2236 + }, + { + "epoch": 0.09230832714368244, 
+ "grad_norm": 3.9340536676057853, + "learning_rate": 2.969597487820341e-06, + "loss": 0.6107, + "step": 2237 + }, + { + "epoch": 0.09234959148304035, + "grad_norm": 3.2091349020096063, + "learning_rate": 2.969557316455198e-06, + "loss": 0.5817, + "step": 2238 + }, + { + "epoch": 0.09239085582239828, + "grad_norm": 3.043752538419158, + "learning_rate": 2.9695171188400857e-06, + "loss": 0.6048, + "step": 2239 + }, + { + "epoch": 0.09243212016175621, + "grad_norm": 4.8631454126999225, + "learning_rate": 2.969476894975721e-06, + "loss": 0.5845, + "step": 2240 + }, + { + "epoch": 0.09247338450111414, + "grad_norm": 4.577236291392567, + "learning_rate": 2.9694366448628227e-06, + "loss": 0.5833, + "step": 2241 + }, + { + "epoch": 0.09251464884047206, + "grad_norm": 3.2741308968905645, + "learning_rate": 2.9693963685021104e-06, + "loss": 0.5925, + "step": 2242 + }, + { + "epoch": 0.09255591317982999, + "grad_norm": 6.347198869765426, + "learning_rate": 2.9693560658943025e-06, + "loss": 0.5352, + "step": 2243 + }, + { + "epoch": 0.09259717751918792, + "grad_norm": 5.0278506200368565, + "learning_rate": 2.9693157370401196e-06, + "loss": 0.6063, + "step": 2244 + }, + { + "epoch": 0.09263844185854585, + "grad_norm": 2.536222619234193, + "learning_rate": 2.969275381940283e-06, + "loss": 0.6159, + "step": 2245 + }, + { + "epoch": 0.09267970619790378, + "grad_norm": 3.2615068698204466, + "learning_rate": 2.9692350005955114e-06, + "loss": 0.5435, + "step": 2246 + }, + { + "epoch": 0.0927209705372617, + "grad_norm": 2.6840425594237134, + "learning_rate": 2.9691945930065277e-06, + "loss": 0.5829, + "step": 2247 + }, + { + "epoch": 0.09276223487661962, + "grad_norm": 3.4445124616206844, + "learning_rate": 2.969154159174053e-06, + "loss": 0.5541, + "step": 2248 + }, + { + "epoch": 0.09280349921597755, + "grad_norm": 3.000379203866031, + "learning_rate": 2.96911369909881e-06, + "loss": 0.5953, + "step": 2249 + }, + { + "epoch": 0.09284476355533548, + "grad_norm": 7.5417851035539885, + "learning_rate": 2.9690732127815208e-06, + "loss": 0.5918, + "step": 2250 + }, + { + "epoch": 0.09288602789469341, + "grad_norm": 6.678793008472283, + "learning_rate": 2.9690327002229093e-06, + "loss": 0.6104, + "step": 2251 + }, + { + "epoch": 0.09292729223405133, + "grad_norm": 5.951978038519827, + "learning_rate": 2.9689921614236987e-06, + "loss": 0.5403, + "step": 2252 + }, + { + "epoch": 0.09296855657340926, + "grad_norm": 3.5791953548088444, + "learning_rate": 2.968951596384613e-06, + "loss": 0.5947, + "step": 2253 + }, + { + "epoch": 0.09300982091276719, + "grad_norm": 4.6145196679253955, + "learning_rate": 2.9689110051063777e-06, + "loss": 0.5287, + "step": 2254 + }, + { + "epoch": 0.09305108525212512, + "grad_norm": 2.551602537189321, + "learning_rate": 2.9688703875897165e-06, + "loss": 0.601, + "step": 2255 + }, + { + "epoch": 0.09309234959148303, + "grad_norm": 3.9331407175826, + "learning_rate": 2.968829743835356e-06, + "loss": 0.5317, + "step": 2256 + }, + { + "epoch": 0.09313361393084096, + "grad_norm": 3.46946902411453, + "learning_rate": 2.9687890738440217e-06, + "loss": 0.536, + "step": 2257 + }, + { + "epoch": 0.0931748782701989, + "grad_norm": 8.102482209386636, + "learning_rate": 2.96874837761644e-06, + "loss": 0.5853, + "step": 2258 + }, + { + "epoch": 0.09321614260955682, + "grad_norm": 3.08489292567048, + "learning_rate": 2.9687076551533388e-06, + "loss": 0.5819, + "step": 2259 + }, + { + "epoch": 0.09325740694891475, + "grad_norm": 4.748116155061617, + "learning_rate": 2.968666906455444e-06, + "loss": 
0.6232, + "step": 2260 + }, + { + "epoch": 0.09329867128827267, + "grad_norm": 2.337407130715243, + "learning_rate": 2.9686261315234844e-06, + "loss": 0.5552, + "step": 2261 + }, + { + "epoch": 0.0933399356276306, + "grad_norm": 6.3369588291003325, + "learning_rate": 2.9685853303581884e-06, + "loss": 0.6162, + "step": 2262 + }, + { + "epoch": 0.09338119996698853, + "grad_norm": 6.244193505969213, + "learning_rate": 2.968544502960284e-06, + "loss": 0.608, + "step": 2263 + }, + { + "epoch": 0.09342246430634646, + "grad_norm": 2.4640936002611653, + "learning_rate": 2.9685036493305013e-06, + "loss": 0.5615, + "step": 2264 + }, + { + "epoch": 0.09346372864570439, + "grad_norm": 3.3535569953226814, + "learning_rate": 2.96846276946957e-06, + "loss": 0.6354, + "step": 2265 + }, + { + "epoch": 0.0935049929850623, + "grad_norm": 2.4671364973831507, + "learning_rate": 2.96842186337822e-06, + "loss": 0.5657, + "step": 2266 + }, + { + "epoch": 0.09354625732442023, + "grad_norm": 4.256567416699889, + "learning_rate": 2.968380931057182e-06, + "loss": 0.516, + "step": 2267 + }, + { + "epoch": 0.09358752166377816, + "grad_norm": 2.9683344307848394, + "learning_rate": 2.968339972507187e-06, + "loss": 0.5529, + "step": 2268 + }, + { + "epoch": 0.0936287860031361, + "grad_norm": 2.824967209815102, + "learning_rate": 2.9682989877289674e-06, + "loss": 0.5627, + "step": 2269 + }, + { + "epoch": 0.09367005034249401, + "grad_norm": 2.529179276836101, + "learning_rate": 2.9682579767232544e-06, + "loss": 0.5562, + "step": 2270 + }, + { + "epoch": 0.09371131468185194, + "grad_norm": 6.601761112099529, + "learning_rate": 2.968216939490781e-06, + "loss": 0.5821, + "step": 2271 + }, + { + "epoch": 0.09375257902120987, + "grad_norm": 4.686408869069461, + "learning_rate": 2.96817587603228e-06, + "loss": 0.5299, + "step": 2272 + }, + { + "epoch": 0.0937938433605678, + "grad_norm": 5.493629988783288, + "learning_rate": 2.9681347863484852e-06, + "loss": 0.6368, + "step": 2273 + }, + { + "epoch": 0.09383510769992573, + "grad_norm": 6.126600026854255, + "learning_rate": 2.9680936704401307e-06, + "loss": 0.5474, + "step": 2274 + }, + { + "epoch": 0.09387637203928365, + "grad_norm": 3.063323150706426, + "learning_rate": 2.9680525283079504e-06, + "loss": 0.5376, + "step": 2275 + }, + { + "epoch": 0.09391763637864158, + "grad_norm": 7.166155381722141, + "learning_rate": 2.9680113599526796e-06, + "loss": 0.5499, + "step": 2276 + }, + { + "epoch": 0.0939589007179995, + "grad_norm": 3.1408693386158593, + "learning_rate": 2.967970165375053e-06, + "loss": 0.5761, + "step": 2277 + }, + { + "epoch": 0.09400016505735743, + "grad_norm": 4.569484352338424, + "learning_rate": 2.9679289445758074e-06, + "loss": 0.5792, + "step": 2278 + }, + { + "epoch": 0.09404142939671536, + "grad_norm": 2.862914843985671, + "learning_rate": 2.9678876975556783e-06, + "loss": 0.5605, + "step": 2279 + }, + { + "epoch": 0.09408269373607328, + "grad_norm": 3.8408027363846218, + "learning_rate": 2.9678464243154033e-06, + "loss": 0.5436, + "step": 2280 + }, + { + "epoch": 0.09412395807543121, + "grad_norm": 3.500435311915085, + "learning_rate": 2.9678051248557186e-06, + "loss": 0.5547, + "step": 2281 + }, + { + "epoch": 0.09416522241478914, + "grad_norm": 3.4810643198408973, + "learning_rate": 2.967763799177363e-06, + "loss": 0.54, + "step": 2282 + }, + { + "epoch": 0.09420648675414707, + "grad_norm": 2.9312934621196454, + "learning_rate": 2.967722447281074e-06, + "loss": 0.5965, + "step": 2283 + }, + { + "epoch": 0.09424775109350499, + "grad_norm": 
5.129655531644274, + "learning_rate": 2.9676810691675908e-06, + "loss": 0.6298, + "step": 2284 + }, + { + "epoch": 0.09428901543286292, + "grad_norm": 14.16771528675871, + "learning_rate": 2.9676396648376513e-06, + "loss": 0.6079, + "step": 2285 + }, + { + "epoch": 0.09433027977222085, + "grad_norm": 4.1641829049111445, + "learning_rate": 2.9675982342919963e-06, + "loss": 0.5792, + "step": 2286 + }, + { + "epoch": 0.09437154411157878, + "grad_norm": 6.025158048005179, + "learning_rate": 2.9675567775313654e-06, + "loss": 0.5636, + "step": 2287 + }, + { + "epoch": 0.0944128084509367, + "grad_norm": 10.88256651833931, + "learning_rate": 2.967515294556499e-06, + "loss": 0.6002, + "step": 2288 + }, + { + "epoch": 0.09445407279029462, + "grad_norm": 74.35246534558765, + "learning_rate": 2.967473785368139e-06, + "loss": 0.6282, + "step": 2289 + }, + { + "epoch": 0.09449533712965255, + "grad_norm": 3.249666555907002, + "learning_rate": 2.9674322499670255e-06, + "loss": 0.5489, + "step": 2290 + }, + { + "epoch": 0.09453660146901048, + "grad_norm": 3.2824496765604705, + "learning_rate": 2.9673906883539016e-06, + "loss": 0.632, + "step": 2291 + }, + { + "epoch": 0.09457786580836841, + "grad_norm": 2.8078163227014046, + "learning_rate": 2.9673491005295084e-06, + "loss": 0.5655, + "step": 2292 + }, + { + "epoch": 0.09461913014772634, + "grad_norm": 7.417511025616636, + "learning_rate": 2.96730748649459e-06, + "loss": 0.6362, + "step": 2293 + }, + { + "epoch": 0.09466039448708426, + "grad_norm": 6.719010441939565, + "learning_rate": 2.9672658462498895e-06, + "loss": 0.6443, + "step": 2294 + }, + { + "epoch": 0.09470165882644219, + "grad_norm": 8.039531611245188, + "learning_rate": 2.96722417979615e-06, + "loss": 0.5307, + "step": 2295 + }, + { + "epoch": 0.09474292316580012, + "grad_norm": 4.4777578744293445, + "learning_rate": 2.967182487134116e-06, + "loss": 0.5829, + "step": 2296 + }, + { + "epoch": 0.09478418750515805, + "grad_norm": 5.119866273915996, + "learning_rate": 2.967140768264533e-06, + "loss": 0.6009, + "step": 2297 + }, + { + "epoch": 0.09482545184451598, + "grad_norm": 3.5850780320288718, + "learning_rate": 2.967099023188146e-06, + "loss": 0.5686, + "step": 2298 + }, + { + "epoch": 0.09486671618387389, + "grad_norm": 3.758864756291488, + "learning_rate": 2.9670572519056993e-06, + "loss": 0.5802, + "step": 2299 + }, + { + "epoch": 0.09490798052323182, + "grad_norm": 3.3060301075810927, + "learning_rate": 2.9670154544179405e-06, + "loss": 0.5934, + "step": 2300 + }, + { + "epoch": 0.09494924486258975, + "grad_norm": 5.66818577656112, + "learning_rate": 2.966973630725616e-06, + "loss": 0.6044, + "step": 2301 + }, + { + "epoch": 0.09499050920194768, + "grad_norm": 2.866048017000269, + "learning_rate": 2.9669317808294723e-06, + "loss": 0.5717, + "step": 2302 + }, + { + "epoch": 0.0950317735413056, + "grad_norm": 3.6019652822065553, + "learning_rate": 2.9668899047302577e-06, + "loss": 0.5318, + "step": 2303 + }, + { + "epoch": 0.09507303788066353, + "grad_norm": 3.389322556505597, + "learning_rate": 2.966848002428719e-06, + "loss": 0.5679, + "step": 2304 + }, + { + "epoch": 0.09511430222002146, + "grad_norm": 84.10984760370349, + "learning_rate": 2.9668060739256063e-06, + "loss": 0.5425, + "step": 2305 + }, + { + "epoch": 0.09515556655937939, + "grad_norm": 3.386542804850714, + "learning_rate": 2.9667641192216673e-06, + "loss": 0.5912, + "step": 2306 + }, + { + "epoch": 0.09519683089873732, + "grad_norm": 8.011464112709291, + "learning_rate": 2.966722138317652e-06, + "loss": 0.5986, + "step": 
2307 + }, + { + "epoch": 0.09523809523809523, + "grad_norm": 2.7809096748655238, + "learning_rate": 2.9666801312143103e-06, + "loss": 0.5626, + "step": 2308 + }, + { + "epoch": 0.09527935957745316, + "grad_norm": 3.359134030340388, + "learning_rate": 2.9666380979123923e-06, + "loss": 0.5375, + "step": 2309 + }, + { + "epoch": 0.09532062391681109, + "grad_norm": 9.62537718968129, + "learning_rate": 2.966596038412649e-06, + "loss": 0.6311, + "step": 2310 + }, + { + "epoch": 0.09536188825616902, + "grad_norm": 4.029121122817803, + "learning_rate": 2.9665539527158313e-06, + "loss": 0.5932, + "step": 2311 + }, + { + "epoch": 0.09540315259552695, + "grad_norm": 20.451203652618492, + "learning_rate": 2.9665118408226914e-06, + "loss": 0.6241, + "step": 2312 + }, + { + "epoch": 0.09544441693488487, + "grad_norm": 7.269266852503116, + "learning_rate": 2.966469702733981e-06, + "loss": 0.5875, + "step": 2313 + }, + { + "epoch": 0.0954856812742428, + "grad_norm": 5.173113363813881, + "learning_rate": 2.9664275384504536e-06, + "loss": 0.6085, + "step": 2314 + }, + { + "epoch": 0.09552694561360073, + "grad_norm": 3.7638188988509467, + "learning_rate": 2.9663853479728622e-06, + "loss": 0.5513, + "step": 2315 + }, + { + "epoch": 0.09556820995295866, + "grad_norm": 2.940587174830425, + "learning_rate": 2.9663431313019598e-06, + "loss": 0.5901, + "step": 2316 + }, + { + "epoch": 0.09560947429231657, + "grad_norm": 6.510091262156742, + "learning_rate": 2.9663008884385006e-06, + "loss": 0.6126, + "step": 2317 + }, + { + "epoch": 0.0956507386316745, + "grad_norm": 2.705016252894216, + "learning_rate": 2.96625861938324e-06, + "loss": 0.6562, + "step": 2318 + }, + { + "epoch": 0.09569200297103243, + "grad_norm": 4.204764770860773, + "learning_rate": 2.966216324136932e-06, + "loss": 0.5806, + "step": 2319 + }, + { + "epoch": 0.09573326731039036, + "grad_norm": 3.007485912517484, + "learning_rate": 2.9661740027003323e-06, + "loss": 0.5689, + "step": 2320 + }, + { + "epoch": 0.09577453164974829, + "grad_norm": 7.0579587143530755, + "learning_rate": 2.9661316550741976e-06, + "loss": 0.6015, + "step": 2321 + }, + { + "epoch": 0.09581579598910621, + "grad_norm": 5.411135291552091, + "learning_rate": 2.966089281259284e-06, + "loss": 0.5454, + "step": 2322 + }, + { + "epoch": 0.09585706032846414, + "grad_norm": 6.729626859306708, + "learning_rate": 2.9660468812563475e-06, + "loss": 0.5687, + "step": 2323 + }, + { + "epoch": 0.09589832466782207, + "grad_norm": 3.1283421268736884, + "learning_rate": 2.9660044550661467e-06, + "loss": 0.605, + "step": 2324 + }, + { + "epoch": 0.09593958900718, + "grad_norm": 6.317661428496651, + "learning_rate": 2.965962002689439e-06, + "loss": 0.6456, + "step": 2325 + }, + { + "epoch": 0.09598085334653793, + "grad_norm": 6.974030335705225, + "learning_rate": 2.9659195241269825e-06, + "loss": 0.5912, + "step": 2326 + }, + { + "epoch": 0.09602211768589584, + "grad_norm": 2.6905666981562204, + "learning_rate": 2.9658770193795364e-06, + "loss": 0.6296, + "step": 2327 + }, + { + "epoch": 0.09606338202525377, + "grad_norm": 3.156171546710795, + "learning_rate": 2.9658344884478596e-06, + "loss": 0.586, + "step": 2328 + }, + { + "epoch": 0.0961046463646117, + "grad_norm": 9.70590719467448, + "learning_rate": 2.9657919313327117e-06, + "loss": 0.5605, + "step": 2329 + }, + { + "epoch": 0.09614591070396963, + "grad_norm": 2.6076401774608495, + "learning_rate": 2.965749348034853e-06, + "loss": 0.5775, + "step": 2330 + }, + { + "epoch": 0.09618717504332755, + "grad_norm": 3.987449189758611, + 
"learning_rate": 2.9657067385550437e-06, + "loss": 0.5826, + "step": 2331 + }, + { + "epoch": 0.09622843938268548, + "grad_norm": 6.395138538506064, + "learning_rate": 2.965664102894046e-06, + "loss": 0.6526, + "step": 2332 + }, + { + "epoch": 0.09626970372204341, + "grad_norm": 3.6452261210002015, + "learning_rate": 2.9656214410526213e-06, + "loss": 0.5661, + "step": 2333 + }, + { + "epoch": 0.09631096806140134, + "grad_norm": 2.603284694503311, + "learning_rate": 2.9655787530315307e-06, + "loss": 0.5578, + "step": 2334 + }, + { + "epoch": 0.09635223240075927, + "grad_norm": 2.7743757340653863, + "learning_rate": 2.9655360388315376e-06, + "loss": 0.617, + "step": 2335 + }, + { + "epoch": 0.09639349674011718, + "grad_norm": 3.7552796179531236, + "learning_rate": 2.9654932984534043e-06, + "loss": 0.53, + "step": 2336 + }, + { + "epoch": 0.09643476107947511, + "grad_norm": 2.926332452005002, + "learning_rate": 2.9654505318978948e-06, + "loss": 0.5727, + "step": 2337 + }, + { + "epoch": 0.09647602541883304, + "grad_norm": 4.080821298792657, + "learning_rate": 2.9654077391657726e-06, + "loss": 0.5747, + "step": 2338 + }, + { + "epoch": 0.09651728975819097, + "grad_norm": 9.417296990503457, + "learning_rate": 2.9653649202578023e-06, + "loss": 0.6219, + "step": 2339 + }, + { + "epoch": 0.0965585540975489, + "grad_norm": 3.869650116616168, + "learning_rate": 2.9653220751747488e-06, + "loss": 0.5351, + "step": 2340 + }, + { + "epoch": 0.09659981843690682, + "grad_norm": 3.3136950371256426, + "learning_rate": 2.9652792039173776e-06, + "loss": 0.5741, + "step": 2341 + }, + { + "epoch": 0.09664108277626475, + "grad_norm": 3.6327884314153804, + "learning_rate": 2.9652363064864538e-06, + "loss": 0.515, + "step": 2342 + }, + { + "epoch": 0.09668234711562268, + "grad_norm": 5.27970423764555, + "learning_rate": 2.9651933828827446e-06, + "loss": 0.5833, + "step": 2343 + }, + { + "epoch": 0.09672361145498061, + "grad_norm": 4.8368002595810955, + "learning_rate": 2.965150433107016e-06, + "loss": 0.5655, + "step": 2344 + }, + { + "epoch": 0.09676487579433853, + "grad_norm": 12.139955341504491, + "learning_rate": 2.9651074571600355e-06, + "loss": 0.6, + "step": 2345 + }, + { + "epoch": 0.09680614013369646, + "grad_norm": 2.4951981364118727, + "learning_rate": 2.9650644550425703e-06, + "loss": 0.5589, + "step": 2346 + }, + { + "epoch": 0.09684740447305439, + "grad_norm": 7.133184607076421, + "learning_rate": 2.965021426755389e-06, + "loss": 0.5733, + "step": 2347 + }, + { + "epoch": 0.09688866881241232, + "grad_norm": 6.407956966623822, + "learning_rate": 2.9649783722992604e-06, + "loss": 0.6602, + "step": 2348 + }, + { + "epoch": 0.09692993315177025, + "grad_norm": 6.81488677496121, + "learning_rate": 2.9649352916749535e-06, + "loss": 0.6086, + "step": 2349 + }, + { + "epoch": 0.09697119749112816, + "grad_norm": 4.023107336577047, + "learning_rate": 2.964892184883237e-06, + "loss": 0.6178, + "step": 2350 + }, + { + "epoch": 0.09701246183048609, + "grad_norm": 4.359647619735744, + "learning_rate": 2.9648490519248818e-06, + "loss": 0.6075, + "step": 2351 + }, + { + "epoch": 0.09705372616984402, + "grad_norm": 3.5276942393863626, + "learning_rate": 2.964805892800658e-06, + "loss": 0.6156, + "step": 2352 + }, + { + "epoch": 0.09709499050920195, + "grad_norm": 2.6167458620567356, + "learning_rate": 2.9647627075113363e-06, + "loss": 0.5984, + "step": 2353 + }, + { + "epoch": 0.09713625484855988, + "grad_norm": 3.051592784084391, + "learning_rate": 2.964719496057689e-06, + "loss": 0.5731, + "step": 2354 + }, + { + 
"epoch": 0.0971775191879178, + "grad_norm": 6.274502862535251, + "learning_rate": 2.964676258440487e-06, + "loss": 0.5887, + "step": 2355 + }, + { + "epoch": 0.09721878352727573, + "grad_norm": 14.169301527596266, + "learning_rate": 2.9646329946605023e-06, + "loss": 0.599, + "step": 2356 + }, + { + "epoch": 0.09726004786663366, + "grad_norm": 2.81601209890518, + "learning_rate": 2.964589704718509e-06, + "loss": 0.566, + "step": 2357 + }, + { + "epoch": 0.09730131220599159, + "grad_norm": 3.9719470764794287, + "learning_rate": 2.96454638861528e-06, + "loss": 0.5504, + "step": 2358 + }, + { + "epoch": 0.09734257654534952, + "grad_norm": 5.475299932185133, + "learning_rate": 2.964503046351588e-06, + "loss": 0.5965, + "step": 2359 + }, + { + "epoch": 0.09738384088470743, + "grad_norm": 3.944846863005259, + "learning_rate": 2.9644596779282087e-06, + "loss": 0.5257, + "step": 2360 + }, + { + "epoch": 0.09742510522406536, + "grad_norm": 5.162382685104703, + "learning_rate": 2.964416283345916e-06, + "loss": 0.6008, + "step": 2361 + }, + { + "epoch": 0.09746636956342329, + "grad_norm": 5.171452814130488, + "learning_rate": 2.964372862605485e-06, + "loss": 0.5152, + "step": 2362 + }, + { + "epoch": 0.09750763390278122, + "grad_norm": 3.030904553290498, + "learning_rate": 2.964329415707691e-06, + "loss": 0.5303, + "step": 2363 + }, + { + "epoch": 0.09754889824213914, + "grad_norm": 13.275303148046124, + "learning_rate": 2.9642859426533108e-06, + "loss": 0.5272, + "step": 2364 + }, + { + "epoch": 0.09759016258149707, + "grad_norm": 4.121806536700886, + "learning_rate": 2.96424244344312e-06, + "loss": 0.6007, + "step": 2365 + }, + { + "epoch": 0.097631426920855, + "grad_norm": 7.078589447712722, + "learning_rate": 2.964198918077897e-06, + "loss": 0.5603, + "step": 2366 + }, + { + "epoch": 0.09767269126021293, + "grad_norm": 2.8283379001516504, + "learning_rate": 2.964155366558418e-06, + "loss": 0.5328, + "step": 2367 + }, + { + "epoch": 0.09771395559957086, + "grad_norm": 10.627003519131028, + "learning_rate": 2.9641117888854613e-06, + "loss": 0.5587, + "step": 2368 + }, + { + "epoch": 0.09775521993892877, + "grad_norm": 5.288958379158096, + "learning_rate": 2.964068185059806e-06, + "loss": 0.5683, + "step": 2369 + }, + { + "epoch": 0.0977964842782867, + "grad_norm": 7.046762039781576, + "learning_rate": 2.9640245550822297e-06, + "loss": 0.6784, + "step": 2370 + }, + { + "epoch": 0.09783774861764463, + "grad_norm": 4.113045208134341, + "learning_rate": 2.9639808989535125e-06, + "loss": 0.6067, + "step": 2371 + }, + { + "epoch": 0.09787901295700256, + "grad_norm": 4.560364507404249, + "learning_rate": 2.9639372166744343e-06, + "loss": 0.6024, + "step": 2372 + }, + { + "epoch": 0.09792027729636049, + "grad_norm": 6.116606947210257, + "learning_rate": 2.9638935082457754e-06, + "loss": 0.5421, + "step": 2373 + }, + { + "epoch": 0.09796154163571841, + "grad_norm": 4.666883388548655, + "learning_rate": 2.963849773668316e-06, + "loss": 0.584, + "step": 2374 + }, + { + "epoch": 0.09800280597507634, + "grad_norm": 2.7807122018113044, + "learning_rate": 2.9638060129428376e-06, + "loss": 0.5942, + "step": 2375 + }, + { + "epoch": 0.09804407031443427, + "grad_norm": 3.735977835604625, + "learning_rate": 2.963762226070122e-06, + "loss": 0.5662, + "step": 2376 + }, + { + "epoch": 0.0980853346537922, + "grad_norm": 4.670334031529252, + "learning_rate": 2.963718413050952e-06, + "loss": 0.5844, + "step": 2377 + }, + { + "epoch": 0.09812659899315011, + "grad_norm": 2.877673744054608, + "learning_rate": 
2.9636745738861084e-06, + "loss": 0.5658, + "step": 2378 + }, + { + "epoch": 0.09816786333250804, + "grad_norm": 4.354693281779936, + "learning_rate": 2.9636307085763757e-06, + "loss": 0.5708, + "step": 2379 + }, + { + "epoch": 0.09820912767186597, + "grad_norm": 5.073378309313158, + "learning_rate": 2.9635868171225375e-06, + "loss": 0.5122, + "step": 2380 + }, + { + "epoch": 0.0982503920112239, + "grad_norm": 4.510132417459072, + "learning_rate": 2.9635428995253772e-06, + "loss": 0.5828, + "step": 2381 + }, + { + "epoch": 0.09829165635058183, + "grad_norm": 3.472520938704555, + "learning_rate": 2.963498955785679e-06, + "loss": 0.6119, + "step": 2382 + }, + { + "epoch": 0.09833292068993975, + "grad_norm": 2.8901219499039197, + "learning_rate": 2.9634549859042294e-06, + "loss": 0.5359, + "step": 2383 + }, + { + "epoch": 0.09837418502929768, + "grad_norm": 3.8499639687597997, + "learning_rate": 2.963410989881812e-06, + "loss": 0.6296, + "step": 2384 + }, + { + "epoch": 0.09841544936865561, + "grad_norm": 3.1435039268082026, + "learning_rate": 2.9633669677192136e-06, + "loss": 0.5921, + "step": 2385 + }, + { + "epoch": 0.09845671370801354, + "grad_norm": 2.8987914885070927, + "learning_rate": 2.9633229194172204e-06, + "loss": 0.5996, + "step": 2386 + }, + { + "epoch": 0.09849797804737147, + "grad_norm": 20.9071917281236, + "learning_rate": 2.963278844976619e-06, + "loss": 0.5793, + "step": 2387 + }, + { + "epoch": 0.09853924238672938, + "grad_norm": 3.0443825362553034, + "learning_rate": 2.9632347443981966e-06, + "loss": 0.5956, + "step": 2388 + }, + { + "epoch": 0.09858050672608731, + "grad_norm": 3.4927533114653864, + "learning_rate": 2.9631906176827416e-06, + "loss": 0.5897, + "step": 2389 + }, + { + "epoch": 0.09862177106544524, + "grad_norm": 6.317649472079871, + "learning_rate": 2.963146464831042e-06, + "loss": 0.5526, + "step": 2390 + }, + { + "epoch": 0.09866303540480317, + "grad_norm": 3.1384112455543125, + "learning_rate": 2.963102285843886e-06, + "loss": 0.5528, + "step": 2391 + }, + { + "epoch": 0.09870429974416109, + "grad_norm": 5.549623699065802, + "learning_rate": 2.9630580807220634e-06, + "loss": 0.6451, + "step": 2392 + }, + { + "epoch": 0.09874556408351902, + "grad_norm": 3.4600883690139237, + "learning_rate": 2.963013849466363e-06, + "loss": 0.6067, + "step": 2393 + }, + { + "epoch": 0.09878682842287695, + "grad_norm": 5.826383318357421, + "learning_rate": 2.9629695920775756e-06, + "loss": 0.532, + "step": 2394 + }, + { + "epoch": 0.09882809276223488, + "grad_norm": 12.988238765807898, + "learning_rate": 2.9629253085564913e-06, + "loss": 0.5746, + "step": 2395 + }, + { + "epoch": 0.09886935710159281, + "grad_norm": 2.73691517548759, + "learning_rate": 2.962880998903902e-06, + "loss": 0.6483, + "step": 2396 + }, + { + "epoch": 0.09891062144095072, + "grad_norm": 5.475463276872391, + "learning_rate": 2.9628366631205975e-06, + "loss": 0.5464, + "step": 2397 + }, + { + "epoch": 0.09895188578030865, + "grad_norm": 2.5803032831300645, + "learning_rate": 2.9627923012073716e-06, + "loss": 0.5664, + "step": 2398 + }, + { + "epoch": 0.09899315011966658, + "grad_norm": 2.3554206060606377, + "learning_rate": 2.9627479131650152e-06, + "loss": 0.5735, + "step": 2399 + }, + { + "epoch": 0.09903441445902451, + "grad_norm": 3.359675956743655, + "learning_rate": 2.962703498994322e-06, + "loss": 0.621, + "step": 2400 + }, + { + "epoch": 0.09907567879838244, + "grad_norm": 4.0437440284337205, + "learning_rate": 2.9626590586960853e-06, + "loss": 0.6166, + "step": 2401 + }, + { + "epoch": 
0.09911694313774036, + "grad_norm": 2.726555941125842, + "learning_rate": 2.962614592271099e-06, + "loss": 0.588, + "step": 2402 + }, + { + "epoch": 0.09915820747709829, + "grad_norm": 4.040714077200297, + "learning_rate": 2.9625700997201566e-06, + "loss": 0.5797, + "step": 2403 + }, + { + "epoch": 0.09919947181645622, + "grad_norm": 4.104023593701126, + "learning_rate": 2.9625255810440537e-06, + "loss": 0.5983, + "step": 2404 + }, + { + "epoch": 0.09924073615581415, + "grad_norm": 6.541708947111622, + "learning_rate": 2.9624810362435858e-06, + "loss": 0.5931, + "step": 2405 + }, + { + "epoch": 0.09928200049517208, + "grad_norm": 4.306413438796065, + "learning_rate": 2.9624364653195477e-06, + "loss": 0.5863, + "step": 2406 + }, + { + "epoch": 0.09932326483453, + "grad_norm": 2.966761875273418, + "learning_rate": 2.9623918682727352e-06, + "loss": 0.5707, + "step": 2407 + }, + { + "epoch": 0.09936452917388792, + "grad_norm": 4.05508870257974, + "learning_rate": 2.9623472451039468e-06, + "loss": 0.5708, + "step": 2408 + }, + { + "epoch": 0.09940579351324585, + "grad_norm": 7.480521991439394, + "learning_rate": 2.9623025958139773e-06, + "loss": 0.6241, + "step": 2409 + }, + { + "epoch": 0.09944705785260378, + "grad_norm": 3.273824052839973, + "learning_rate": 2.9622579204036254e-06, + "loss": 0.5861, + "step": 2410 + }, + { + "epoch": 0.0994883221919617, + "grad_norm": 3.0979344447885837, + "learning_rate": 2.96221321887369e-06, + "loss": 0.5753, + "step": 2411 + }, + { + "epoch": 0.09952958653131963, + "grad_norm": 3.709621775701049, + "learning_rate": 2.9621684912249676e-06, + "loss": 0.5537, + "step": 2412 + }, + { + "epoch": 0.09957085087067756, + "grad_norm": 3.3922022792134463, + "learning_rate": 2.9621237374582584e-06, + "loss": 0.6003, + "step": 2413 + }, + { + "epoch": 0.09961211521003549, + "grad_norm": 3.9100398095771736, + "learning_rate": 2.962078957574362e-06, + "loss": 0.5055, + "step": 2414 + }, + { + "epoch": 0.09965337954939342, + "grad_norm": 5.362176012482597, + "learning_rate": 2.9620341515740775e-06, + "loss": 0.6277, + "step": 2415 + }, + { + "epoch": 0.09969464388875134, + "grad_norm": 8.837856928549567, + "learning_rate": 2.9619893194582055e-06, + "loss": 0.5699, + "step": 2416 + }, + { + "epoch": 0.09973590822810927, + "grad_norm": 5.5034927582377335, + "learning_rate": 2.961944461227547e-06, + "loss": 0.5879, + "step": 2417 + }, + { + "epoch": 0.0997771725674672, + "grad_norm": 4.444288396576836, + "learning_rate": 2.9618995768829036e-06, + "loss": 0.5125, + "step": 2418 + }, + { + "epoch": 0.09981843690682513, + "grad_norm": 7.017199477764068, + "learning_rate": 2.9618546664250763e-06, + "loss": 0.644, + "step": 2419 + }, + { + "epoch": 0.09985970124618306, + "grad_norm": 6.692006553874407, + "learning_rate": 2.9618097298548674e-06, + "loss": 0.5576, + "step": 2420 + }, + { + "epoch": 0.09990096558554097, + "grad_norm": 9.8902249462505, + "learning_rate": 2.96176476717308e-06, + "loss": 0.5871, + "step": 2421 + }, + { + "epoch": 0.0999422299248989, + "grad_norm": 11.860211013077459, + "learning_rate": 2.9617197783805176e-06, + "loss": 0.5685, + "step": 2422 + }, + { + "epoch": 0.09998349426425683, + "grad_norm": 9.512856053423759, + "learning_rate": 2.961674763477983e-06, + "loss": 0.5811, + "step": 2423 + }, + { + "epoch": 0.10002475860361476, + "grad_norm": 6.030848198963714, + "learning_rate": 2.96162972246628e-06, + "loss": 0.5968, + "step": 2424 + }, + { + "epoch": 0.10006602294297268, + "grad_norm": 2.3296029347947647, + "learning_rate": 2.961584655346214e-06, 
+ "loss": 0.5554, + "step": 2425 + }, + { + "epoch": 0.1001072872823306, + "grad_norm": 2.653682399076288, + "learning_rate": 2.96153956211859e-06, + "loss": 0.5937, + "step": 2426 + }, + { + "epoch": 0.10014855162168854, + "grad_norm": 5.618209868847821, + "learning_rate": 2.9614944427842135e-06, + "loss": 0.5827, + "step": 2427 + }, + { + "epoch": 0.10018981596104647, + "grad_norm": 5.286092237539745, + "learning_rate": 2.9614492973438896e-06, + "loss": 0.6088, + "step": 2428 + }, + { + "epoch": 0.1002310803004044, + "grad_norm": 4.309748648011326, + "learning_rate": 2.9614041257984257e-06, + "loss": 0.5943, + "step": 2429 + }, + { + "epoch": 0.10027234463976231, + "grad_norm": 3.3373167383179796, + "learning_rate": 2.961358928148628e-06, + "loss": 0.551, + "step": 2430 + }, + { + "epoch": 0.10031360897912024, + "grad_norm": 2.790366124994582, + "learning_rate": 2.961313704395304e-06, + "loss": 0.6145, + "step": 2431 + }, + { + "epoch": 0.10035487331847817, + "grad_norm": 3.120386997082337, + "learning_rate": 2.9612684545392615e-06, + "loss": 0.5558, + "step": 2432 + }, + { + "epoch": 0.1003961376578361, + "grad_norm": 3.12667570296061, + "learning_rate": 2.961223178581309e-06, + "loss": 0.5706, + "step": 2433 + }, + { + "epoch": 0.10043740199719403, + "grad_norm": 2.399808367905949, + "learning_rate": 2.9611778765222554e-06, + "loss": 0.536, + "step": 2434 + }, + { + "epoch": 0.10047866633655195, + "grad_norm": 4.642574954022511, + "learning_rate": 2.9611325483629094e-06, + "loss": 0.56, + "step": 2435 + }, + { + "epoch": 0.10051993067590988, + "grad_norm": 2.7558086364285916, + "learning_rate": 2.9610871941040807e-06, + "loss": 0.5796, + "step": 2436 + }, + { + "epoch": 0.1005611950152678, + "grad_norm": 2.9283120710348602, + "learning_rate": 2.9610418137465796e-06, + "loss": 0.517, + "step": 2437 + }, + { + "epoch": 0.10060245935462574, + "grad_norm": 3.226044242670531, + "learning_rate": 2.9609964072912174e-06, + "loss": 0.5081, + "step": 2438 + }, + { + "epoch": 0.10064372369398365, + "grad_norm": 3.651775689380705, + "learning_rate": 2.9609509747388037e-06, + "loss": 0.5982, + "step": 2439 + }, + { + "epoch": 0.10068498803334158, + "grad_norm": 3.7160016601549914, + "learning_rate": 2.960905516090151e-06, + "loss": 0.5334, + "step": 2440 + }, + { + "epoch": 0.10072625237269951, + "grad_norm": 3.691882751628799, + "learning_rate": 2.9608600313460717e-06, + "loss": 0.6001, + "step": 2441 + }, + { + "epoch": 0.10076751671205744, + "grad_norm": 4.192583154764613, + "learning_rate": 2.9608145205073772e-06, + "loss": 0.6617, + "step": 2442 + }, + { + "epoch": 0.10080878105141537, + "grad_norm": 3.847711040508321, + "learning_rate": 2.960768983574881e-06, + "loss": 0.569, + "step": 2443 + }, + { + "epoch": 0.10085004539077329, + "grad_norm": 11.34508427950798, + "learning_rate": 2.960723420549397e-06, + "loss": 0.6402, + "step": 2444 + }, + { + "epoch": 0.10089130973013122, + "grad_norm": 5.002570322759988, + "learning_rate": 2.960677831431738e-06, + "loss": 0.562, + "step": 2445 + }, + { + "epoch": 0.10093257406948915, + "grad_norm": 5.147448381441562, + "learning_rate": 2.960632216222719e-06, + "loss": 0.577, + "step": 2446 + }, + { + "epoch": 0.10097383840884708, + "grad_norm": 3.669754471006206, + "learning_rate": 2.9605865749231544e-06, + "loss": 0.5343, + "step": 2447 + }, + { + "epoch": 0.10101510274820501, + "grad_norm": 5.151642236703819, + "learning_rate": 2.96054090753386e-06, + "loss": 0.6449, + "step": 2448 + }, + { + "epoch": 0.10105636708756292, + "grad_norm": 
3.3895436778043546, + "learning_rate": 2.9604952140556514e-06, + "loss": 0.5336, + "step": 2449 + }, + { + "epoch": 0.10109763142692085, + "grad_norm": 4.142761662175468, + "learning_rate": 2.9604494944893447e-06, + "loss": 0.6209, + "step": 2450 + }, + { + "epoch": 0.10113889576627878, + "grad_norm": 6.204283434401515, + "learning_rate": 2.960403748835756e-06, + "loss": 0.5741, + "step": 2451 + }, + { + "epoch": 0.10118016010563671, + "grad_norm": 3.4920729139193063, + "learning_rate": 2.960357977095704e-06, + "loss": 0.608, + "step": 2452 + }, + { + "epoch": 0.10122142444499463, + "grad_norm": 3.216966069324383, + "learning_rate": 2.9603121792700045e-06, + "loss": 0.5646, + "step": 2453 + }, + { + "epoch": 0.10126268878435256, + "grad_norm": 3.118406177489252, + "learning_rate": 2.9602663553594764e-06, + "loss": 0.5701, + "step": 2454 + }, + { + "epoch": 0.10130395312371049, + "grad_norm": 4.608835177841589, + "learning_rate": 2.9602205053649384e-06, + "loss": 0.5711, + "step": 2455 + }, + { + "epoch": 0.10134521746306842, + "grad_norm": 2.9238212061985926, + "learning_rate": 2.960174629287209e-06, + "loss": 0.5207, + "step": 2456 + }, + { + "epoch": 0.10138648180242635, + "grad_norm": 3.771368953156656, + "learning_rate": 2.9601287271271083e-06, + "loss": 0.5673, + "step": 2457 + }, + { + "epoch": 0.10142774614178426, + "grad_norm": 3.2296035926160487, + "learning_rate": 2.9600827988854555e-06, + "loss": 0.6281, + "step": 2458 + }, + { + "epoch": 0.1014690104811422, + "grad_norm": 2.657955050472471, + "learning_rate": 2.9600368445630718e-06, + "loss": 0.4674, + "step": 2459 + }, + { + "epoch": 0.10151027482050012, + "grad_norm": 3.028666081207159, + "learning_rate": 2.959990864160777e-06, + "loss": 0.5484, + "step": 2460 + }, + { + "epoch": 0.10155153915985805, + "grad_norm": 4.6693869720558565, + "learning_rate": 2.9599448576793935e-06, + "loss": 0.5473, + "step": 2461 + }, + { + "epoch": 0.10159280349921598, + "grad_norm": 3.4753858026566475, + "learning_rate": 2.959898825119742e-06, + "loss": 0.5255, + "step": 2462 + }, + { + "epoch": 0.1016340678385739, + "grad_norm": 2.6565278189409796, + "learning_rate": 2.959852766482646e-06, + "loss": 0.5576, + "step": 2463 + }, + { + "epoch": 0.10167533217793183, + "grad_norm": 10.664169709533303, + "learning_rate": 2.959806681768927e-06, + "loss": 0.5742, + "step": 2464 + }, + { + "epoch": 0.10171659651728976, + "grad_norm": 3.71836724282985, + "learning_rate": 2.9597605709794097e-06, + "loss": 0.5093, + "step": 2465 + }, + { + "epoch": 0.10175786085664769, + "grad_norm": 6.7108104726727635, + "learning_rate": 2.9597144341149164e-06, + "loss": 0.604, + "step": 2466 + }, + { + "epoch": 0.10179912519600562, + "grad_norm": 5.631430930746006, + "learning_rate": 2.9596682711762714e-06, + "loss": 0.6064, + "step": 2467 + }, + { + "epoch": 0.10184038953536353, + "grad_norm": 11.890763864954883, + "learning_rate": 2.9596220821642997e-06, + "loss": 0.5942, + "step": 2468 + }, + { + "epoch": 0.10188165387472146, + "grad_norm": 2.607169731130218, + "learning_rate": 2.9595758670798263e-06, + "loss": 0.5527, + "step": 2469 + }, + { + "epoch": 0.1019229182140794, + "grad_norm": 3.171103737658806, + "learning_rate": 2.9595296259236763e-06, + "loss": 0.6872, + "step": 2470 + }, + { + "epoch": 0.10196418255343732, + "grad_norm": 3.586552797541722, + "learning_rate": 2.9594833586966763e-06, + "loss": 0.521, + "step": 2471 + }, + { + "epoch": 0.10200544689279524, + "grad_norm": 7.759227013380616, + "learning_rate": 2.9594370653996527e-06, + "loss": 0.6043, + 
"step": 2472 + }, + { + "epoch": 0.10204671123215317, + "grad_norm": 2.490280708123922, + "learning_rate": 2.9593907460334316e-06, + "loss": 0.5907, + "step": 2473 + }, + { + "epoch": 0.1020879755715111, + "grad_norm": 4.508312082618622, + "learning_rate": 2.9593444005988416e-06, + "loss": 0.58, + "step": 2474 + }, + { + "epoch": 0.10212923991086903, + "grad_norm": 3.6178857473969197, + "learning_rate": 2.9592980290967093e-06, + "loss": 0.6019, + "step": 2475 + }, + { + "epoch": 0.10217050425022696, + "grad_norm": 3.1838164091828696, + "learning_rate": 2.959251631527864e-06, + "loss": 0.5995, + "step": 2476 + }, + { + "epoch": 0.10221176858958488, + "grad_norm": 2.2564815150530904, + "learning_rate": 2.9592052078931335e-06, + "loss": 0.5487, + "step": 2477 + }, + { + "epoch": 0.1022530329289428, + "grad_norm": 2.577516219745642, + "learning_rate": 2.959158758193348e-06, + "loss": 0.5549, + "step": 2478 + }, + { + "epoch": 0.10229429726830074, + "grad_norm": 4.565891916167473, + "learning_rate": 2.959112282429337e-06, + "loss": 0.5413, + "step": 2479 + }, + { + "epoch": 0.10233556160765866, + "grad_norm": 11.218015229946612, + "learning_rate": 2.95906578060193e-06, + "loss": 0.597, + "step": 2480 + }, + { + "epoch": 0.1023768259470166, + "grad_norm": 7.057167074355086, + "learning_rate": 2.9590192527119582e-06, + "loss": 0.6086, + "step": 2481 + }, + { + "epoch": 0.10241809028637451, + "grad_norm": 3.8236424491532786, + "learning_rate": 2.958972698760253e-06, + "loss": 0.6566, + "step": 2482 + }, + { + "epoch": 0.10245935462573244, + "grad_norm": 2.6170097108209047, + "learning_rate": 2.9589261187476453e-06, + "loss": 0.5671, + "step": 2483 + }, + { + "epoch": 0.10250061896509037, + "grad_norm": 4.656311943887713, + "learning_rate": 2.958879512674968e-06, + "loss": 0.5334, + "step": 2484 + }, + { + "epoch": 0.1025418833044483, + "grad_norm": 5.44366246110285, + "learning_rate": 2.958832880543052e-06, + "loss": 0.5856, + "step": 2485 + }, + { + "epoch": 0.10258314764380622, + "grad_norm": 3.476253454126128, + "learning_rate": 2.958786222352732e-06, + "loss": 0.5613, + "step": 2486 + }, + { + "epoch": 0.10262441198316415, + "grad_norm": 3.5413404075116963, + "learning_rate": 2.9587395381048413e-06, + "loss": 0.5965, + "step": 2487 + }, + { + "epoch": 0.10266567632252208, + "grad_norm": 6.9845822051170305, + "learning_rate": 2.9586928278002124e-06, + "loss": 0.6285, + "step": 2488 + }, + { + "epoch": 0.10270694066188, + "grad_norm": 4.701171836502646, + "learning_rate": 2.9586460914396808e-06, + "loss": 0.5595, + "step": 2489 + }, + { + "epoch": 0.10274820500123794, + "grad_norm": 8.905067823783936, + "learning_rate": 2.958599329024081e-06, + "loss": 0.5782, + "step": 2490 + }, + { + "epoch": 0.10278946934059585, + "grad_norm": 8.028235820658894, + "learning_rate": 2.958552540554248e-06, + "loss": 0.6101, + "step": 2491 + }, + { + "epoch": 0.10283073367995378, + "grad_norm": 9.827645995270574, + "learning_rate": 2.9585057260310183e-06, + "loss": 0.555, + "step": 2492 + }, + { + "epoch": 0.10287199801931171, + "grad_norm": 4.257693804137113, + "learning_rate": 2.958458885455228e-06, + "loss": 0.6109, + "step": 2493 + }, + { + "epoch": 0.10291326235866964, + "grad_norm": 4.541102634870463, + "learning_rate": 2.958412018827713e-06, + "loss": 0.5383, + "step": 2494 + }, + { + "epoch": 0.10295452669802757, + "grad_norm": 4.9250697154926275, + "learning_rate": 2.9583651261493106e-06, + "loss": 0.5307, + "step": 2495 + }, + { + "epoch": 0.10299579103738549, + "grad_norm": 3.839100383568115, + 
"learning_rate": 2.9583182074208595e-06, + "loss": 0.5834, + "step": 2496 + }, + { + "epoch": 0.10303705537674342, + "grad_norm": 10.657686426573838, + "learning_rate": 2.958271262643197e-06, + "loss": 0.575, + "step": 2497 + }, + { + "epoch": 0.10307831971610135, + "grad_norm": 14.764383153277032, + "learning_rate": 2.958224291817161e-06, + "loss": 0.6139, + "step": 2498 + }, + { + "epoch": 0.10311958405545928, + "grad_norm": 3.798315065960495, + "learning_rate": 2.958177294943592e-06, + "loss": 0.5942, + "step": 2499 + }, + { + "epoch": 0.10316084839481719, + "grad_norm": 4.243101175706441, + "learning_rate": 2.9581302720233288e-06, + "loss": 0.5956, + "step": 2500 + }, + { + "epoch": 0.10320211273417512, + "grad_norm": 3.8019831864615083, + "learning_rate": 2.9580832230572106e-06, + "loss": 0.6011, + "step": 2501 + }, + { + "epoch": 0.10324337707353305, + "grad_norm": 3.343899428384781, + "learning_rate": 2.9580361480460787e-06, + "loss": 0.5428, + "step": 2502 + }, + { + "epoch": 0.10328464141289098, + "grad_norm": 3.205114256307482, + "learning_rate": 2.9579890469907738e-06, + "loss": 0.5667, + "step": 2503 + }, + { + "epoch": 0.10332590575224891, + "grad_norm": 6.486243404006152, + "learning_rate": 2.9579419198921377e-06, + "loss": 0.5731, + "step": 2504 + }, + { + "epoch": 0.10336717009160683, + "grad_norm": 3.502665851864618, + "learning_rate": 2.9578947667510107e-06, + "loss": 0.5484, + "step": 2505 + }, + { + "epoch": 0.10340843443096476, + "grad_norm": 3.0373971535283655, + "learning_rate": 2.9578475875682365e-06, + "loss": 0.5281, + "step": 2506 + }, + { + "epoch": 0.10344969877032269, + "grad_norm": 5.405587485535088, + "learning_rate": 2.957800382344658e-06, + "loss": 0.595, + "step": 2507 + }, + { + "epoch": 0.10349096310968062, + "grad_norm": 6.5321391001611815, + "learning_rate": 2.9577531510811172e-06, + "loss": 0.5959, + "step": 2508 + }, + { + "epoch": 0.10353222744903855, + "grad_norm": 2.6851563700220287, + "learning_rate": 2.957705893778458e-06, + "loss": 0.5066, + "step": 2509 + }, + { + "epoch": 0.10357349178839646, + "grad_norm": 3.191727366441321, + "learning_rate": 2.9576586104375256e-06, + "loss": 0.5516, + "step": 2510 + }, + { + "epoch": 0.10361475612775439, + "grad_norm": 3.08355179504299, + "learning_rate": 2.957611301059164e-06, + "loss": 0.6552, + "step": 2511 + }, + { + "epoch": 0.10365602046711232, + "grad_norm": 2.585967505728303, + "learning_rate": 2.957563965644218e-06, + "loss": 0.5907, + "step": 2512 + }, + { + "epoch": 0.10369728480647025, + "grad_norm": 4.872496947679218, + "learning_rate": 2.9575166041935328e-06, + "loss": 0.5451, + "step": 2513 + }, + { + "epoch": 0.10373854914582818, + "grad_norm": 4.312550017737313, + "learning_rate": 2.9574692167079554e-06, + "loss": 0.5467, + "step": 2514 + }, + { + "epoch": 0.1037798134851861, + "grad_norm": 3.7269107417953546, + "learning_rate": 2.9574218031883317e-06, + "loss": 0.6131, + "step": 2515 + }, + { + "epoch": 0.10382107782454403, + "grad_norm": 47.64777205749646, + "learning_rate": 2.957374363635509e-06, + "loss": 0.6076, + "step": 2516 + }, + { + "epoch": 0.10386234216390196, + "grad_norm": 3.7934916975359596, + "learning_rate": 2.9573268980503337e-06, + "loss": 0.5641, + "step": 2517 + }, + { + "epoch": 0.10390360650325989, + "grad_norm": 2.593786358505945, + "learning_rate": 2.9572794064336547e-06, + "loss": 0.5405, + "step": 2518 + }, + { + "epoch": 0.1039448708426178, + "grad_norm": 3.1140870450914138, + "learning_rate": 2.95723188878632e-06, + "loss": 0.6653, + "step": 2519 + }, + { + 
"epoch": 0.10398613518197573, + "grad_norm": 4.1781144253838765, + "learning_rate": 2.957184345109178e-06, + "loss": 0.5724, + "step": 2520 + }, + { + "epoch": 0.10402739952133366, + "grad_norm": 3.324100612772404, + "learning_rate": 2.957136775403079e-06, + "loss": 0.5992, + "step": 2521 + }, + { + "epoch": 0.1040686638606916, + "grad_norm": 2.414220908328319, + "learning_rate": 2.957089179668871e-06, + "loss": 0.5938, + "step": 2522 + }, + { + "epoch": 0.10410992820004952, + "grad_norm": 2.4086532651163646, + "learning_rate": 2.9570415579074057e-06, + "loss": 0.5765, + "step": 2523 + }, + { + "epoch": 0.10415119253940744, + "grad_norm": 5.593723901909046, + "learning_rate": 2.9569939101195337e-06, + "loss": 0.5449, + "step": 2524 + }, + { + "epoch": 0.10419245687876537, + "grad_norm": 2.3626042282397894, + "learning_rate": 2.9569462363061046e-06, + "loss": 0.568, + "step": 2525 + }, + { + "epoch": 0.1042337212181233, + "grad_norm": 3.1845031340192334, + "learning_rate": 2.956898536467972e-06, + "loss": 0.533, + "step": 2526 + }, + { + "epoch": 0.10427498555748123, + "grad_norm": 3.1675848418926837, + "learning_rate": 2.956850810605986e-06, + "loss": 0.5544, + "step": 2527 + }, + { + "epoch": 0.10431624989683916, + "grad_norm": 6.314829811902782, + "learning_rate": 2.9568030587210005e-06, + "loss": 0.5596, + "step": 2528 + }, + { + "epoch": 0.10435751423619707, + "grad_norm": 2.856072614663881, + "learning_rate": 2.956755280813868e-06, + "loss": 0.559, + "step": 2529 + }, + { + "epoch": 0.104398778575555, + "grad_norm": 3.0699679072131065, + "learning_rate": 2.956707476885442e-06, + "loss": 0.5821, + "step": 2530 + }, + { + "epoch": 0.10444004291491293, + "grad_norm": 7.029666100882937, + "learning_rate": 2.9566596469365766e-06, + "loss": 0.5627, + "step": 2531 + }, + { + "epoch": 0.10448130725427086, + "grad_norm": 7.548032704003504, + "learning_rate": 2.956611790968125e-06, + "loss": 0.5672, + "step": 2532 + }, + { + "epoch": 0.10452257159362878, + "grad_norm": 4.3604681391019655, + "learning_rate": 2.956563908980944e-06, + "loss": 0.5804, + "step": 2533 + }, + { + "epoch": 0.10456383593298671, + "grad_norm": 4.086864938215322, + "learning_rate": 2.956516000975887e-06, + "loss": 0.5471, + "step": 2534 + }, + { + "epoch": 0.10460510027234464, + "grad_norm": 12.10290417704781, + "learning_rate": 2.956468066953811e-06, + "loss": 0.584, + "step": 2535 + }, + { + "epoch": 0.10464636461170257, + "grad_norm": 3.5841669657617192, + "learning_rate": 2.956420106915572e-06, + "loss": 0.5458, + "step": 2536 + }, + { + "epoch": 0.1046876289510605, + "grad_norm": 3.7909385579210504, + "learning_rate": 2.956372120862026e-06, + "loss": 0.6162, + "step": 2537 + }, + { + "epoch": 0.10472889329041841, + "grad_norm": 8.00058666256134, + "learning_rate": 2.956324108794031e-06, + "loss": 0.5518, + "step": 2538 + }, + { + "epoch": 0.10477015762977634, + "grad_norm": 11.513341292351324, + "learning_rate": 2.9562760707124443e-06, + "loss": 0.6227, + "step": 2539 + }, + { + "epoch": 0.10481142196913427, + "grad_norm": 3.405673122718217, + "learning_rate": 2.9562280066181235e-06, + "loss": 0.5855, + "step": 2540 + }, + { + "epoch": 0.1048526863084922, + "grad_norm": 2.598601859461365, + "learning_rate": 2.956179916511928e-06, + "loss": 0.5556, + "step": 2541 + }, + { + "epoch": 0.10489395064785013, + "grad_norm": 5.829825001748462, + "learning_rate": 2.9561318003947158e-06, + "loss": 0.607, + "step": 2542 + }, + { + "epoch": 0.10493521498720805, + "grad_norm": 3.144766521660832, + "learning_rate": 
2.9560836582673475e-06, + "loss": 0.5856, + "step": 2543 + }, + { + "epoch": 0.10497647932656598, + "grad_norm": 31.706128082006803, + "learning_rate": 2.9560354901306823e-06, + "loss": 0.6135, + "step": 2544 + }, + { + "epoch": 0.10501774366592391, + "grad_norm": 3.7416538791643474, + "learning_rate": 2.9559872959855808e-06, + "loss": 0.5993, + "step": 2545 + }, + { + "epoch": 0.10505900800528184, + "grad_norm": 3.3984886286586864, + "learning_rate": 2.955939075832904e-06, + "loss": 0.5975, + "step": 2546 + }, + { + "epoch": 0.10510027234463976, + "grad_norm": 4.024711714262104, + "learning_rate": 2.9558908296735128e-06, + "loss": 0.6393, + "step": 2547 + }, + { + "epoch": 0.10514153668399769, + "grad_norm": 4.26063077087935, + "learning_rate": 2.9558425575082696e-06, + "loss": 0.5425, + "step": 2548 + }, + { + "epoch": 0.10518280102335562, + "grad_norm": 37.97732433670639, + "learning_rate": 2.9557942593380366e-06, + "loss": 0.5608, + "step": 2549 + }, + { + "epoch": 0.10522406536271355, + "grad_norm": 2.350820307283611, + "learning_rate": 2.9557459351636758e-06, + "loss": 0.6399, + "step": 2550 + }, + { + "epoch": 0.10526532970207148, + "grad_norm": 13.961686340629303, + "learning_rate": 2.955697584986051e-06, + "loss": 0.5834, + "step": 2551 + }, + { + "epoch": 0.10530659404142939, + "grad_norm": 24.500934583200085, + "learning_rate": 2.9556492088060254e-06, + "loss": 0.6015, + "step": 2552 + }, + { + "epoch": 0.10534785838078732, + "grad_norm": 6.077502185215628, + "learning_rate": 2.9556008066244643e-06, + "loss": 0.5588, + "step": 2553 + }, + { + "epoch": 0.10538912272014525, + "grad_norm": 2.81812912483206, + "learning_rate": 2.955552378442231e-06, + "loss": 0.5462, + "step": 2554 + }, + { + "epoch": 0.10543038705950318, + "grad_norm": 3.4254300532113224, + "learning_rate": 2.9555039242601913e-06, + "loss": 0.5662, + "step": 2555 + }, + { + "epoch": 0.10547165139886111, + "grad_norm": 4.3487080317097, + "learning_rate": 2.9554554440792095e-06, + "loss": 0.6306, + "step": 2556 + }, + { + "epoch": 0.10551291573821903, + "grad_norm": 3.668928327920458, + "learning_rate": 2.9554069379001535e-06, + "loss": 0.5728, + "step": 2557 + }, + { + "epoch": 0.10555418007757696, + "grad_norm": 2.153172880562939, + "learning_rate": 2.955358405723888e-06, + "loss": 0.5204, + "step": 2558 + }, + { + "epoch": 0.10559544441693489, + "grad_norm": 2.3565980535944955, + "learning_rate": 2.955309847551281e-06, + "loss": 0.5206, + "step": 2559 + }, + { + "epoch": 0.10563670875629282, + "grad_norm": 6.007986765201419, + "learning_rate": 2.9552612633831995e-06, + "loss": 0.5519, + "step": 2560 + }, + { + "epoch": 0.10567797309565073, + "grad_norm": 3.245724550912009, + "learning_rate": 2.955212653220511e-06, + "loss": 0.6199, + "step": 2561 + }, + { + "epoch": 0.10571923743500866, + "grad_norm": 6.586780813513655, + "learning_rate": 2.9551640170640843e-06, + "loss": 0.6105, + "step": 2562 + }, + { + "epoch": 0.10576050177436659, + "grad_norm": 3.9846163915651798, + "learning_rate": 2.955115354914788e-06, + "loss": 0.5866, + "step": 2563 + }, + { + "epoch": 0.10580176611372452, + "grad_norm": 4.082312439861413, + "learning_rate": 2.9550666667734913e-06, + "loss": 0.5316, + "step": 2564 + }, + { + "epoch": 0.10584303045308245, + "grad_norm": 5.5206955899869, + "learning_rate": 2.955017952641064e-06, + "loss": 0.5583, + "step": 2565 + }, + { + "epoch": 0.10588429479244037, + "grad_norm": 4.926054627722616, + "learning_rate": 2.9549692125183763e-06, + "loss": 0.5929, + "step": 2566 + }, + { + "epoch": 
0.1059255591317983, + "grad_norm": 2.606988815415895, + "learning_rate": 2.9549204464062985e-06, + "loss": 0.5362, + "step": 2567 + }, + { + "epoch": 0.10596682347115623, + "grad_norm": 4.236117374315519, + "learning_rate": 2.954871654305702e-06, + "loss": 0.5804, + "step": 2568 + }, + { + "epoch": 0.10600808781051416, + "grad_norm": 2.9295004114016905, + "learning_rate": 2.9548228362174582e-06, + "loss": 0.5312, + "step": 2569 + }, + { + "epoch": 0.10604935214987209, + "grad_norm": 3.0278364329729968, + "learning_rate": 2.954773992142439e-06, + "loss": 0.5978, + "step": 2570 + }, + { + "epoch": 0.10609061648923, + "grad_norm": 6.8890828734439555, + "learning_rate": 2.954725122081517e-06, + "loss": 0.5563, + "step": 2571 + }, + { + "epoch": 0.10613188082858793, + "grad_norm": 4.086502637723713, + "learning_rate": 2.954676226035565e-06, + "loss": 0.596, + "step": 2572 + }, + { + "epoch": 0.10617314516794586, + "grad_norm": 5.422353737477836, + "learning_rate": 2.954627304005457e-06, + "loss": 0.6163, + "step": 2573 + }, + { + "epoch": 0.10621440950730379, + "grad_norm": 4.633035798704782, + "learning_rate": 2.9545783559920657e-06, + "loss": 0.5709, + "step": 2574 + }, + { + "epoch": 0.10625567384666172, + "grad_norm": 4.569614270598896, + "learning_rate": 2.954529381996267e-06, + "loss": 0.6104, + "step": 2575 + }, + { + "epoch": 0.10629693818601964, + "grad_norm": 5.072229710277679, + "learning_rate": 2.9544803820189346e-06, + "loss": 0.5594, + "step": 2576 + }, + { + "epoch": 0.10633820252537757, + "grad_norm": 8.899109016295146, + "learning_rate": 2.9544313560609443e-06, + "loss": 0.6315, + "step": 2577 + }, + { + "epoch": 0.1063794668647355, + "grad_norm": 2.5779137731018458, + "learning_rate": 2.954382304123171e-06, + "loss": 0.5424, + "step": 2578 + }, + { + "epoch": 0.10642073120409343, + "grad_norm": 2.863808951974925, + "learning_rate": 2.9543332262064912e-06, + "loss": 0.5684, + "step": 2579 + }, + { + "epoch": 0.10646199554345134, + "grad_norm": 4.817276282327468, + "learning_rate": 2.9542841223117827e-06, + "loss": 0.6309, + "step": 2580 + }, + { + "epoch": 0.10650325988280927, + "grad_norm": 4.97646948644186, + "learning_rate": 2.9542349924399213e-06, + "loss": 0.5955, + "step": 2581 + }, + { + "epoch": 0.1065445242221672, + "grad_norm": 3.6691703155243722, + "learning_rate": 2.954185836591785e-06, + "loss": 0.5568, + "step": 2582 + }, + { + "epoch": 0.10658578856152513, + "grad_norm": 3.0723418919966754, + "learning_rate": 2.9541366547682517e-06, + "loss": 0.5997, + "step": 2583 + }, + { + "epoch": 0.10662705290088306, + "grad_norm": 3.753620399659851, + "learning_rate": 2.9540874469702002e-06, + "loss": 0.594, + "step": 2584 + }, + { + "epoch": 0.10666831724024098, + "grad_norm": 3.911789366178605, + "learning_rate": 2.9540382131985094e-06, + "loss": 0.5848, + "step": 2585 + }, + { + "epoch": 0.10670958157959891, + "grad_norm": 2.0838203099538815, + "learning_rate": 2.9539889534540584e-06, + "loss": 0.5431, + "step": 2586 + }, + { + "epoch": 0.10675084591895684, + "grad_norm": 3.358768880459728, + "learning_rate": 2.9539396677377274e-06, + "loss": 0.5694, + "step": 2587 + }, + { + "epoch": 0.10679211025831477, + "grad_norm": 3.2620636172588413, + "learning_rate": 2.9538903560503967e-06, + "loss": 0.5536, + "step": 2588 + }, + { + "epoch": 0.1068333745976727, + "grad_norm": 6.646611751013085, + "learning_rate": 2.9538410183929474e-06, + "loss": 0.6283, + "step": 2589 + }, + { + "epoch": 0.10687463893703061, + "grad_norm": 2.9322939976916422, + "learning_rate": 
2.9537916547662603e-06, + "loss": 0.5543, + "step": 2590 + }, + { + "epoch": 0.10691590327638854, + "grad_norm": 3.2551102304046617, + "learning_rate": 2.953742265171218e-06, + "loss": 0.6026, + "step": 2591 + }, + { + "epoch": 0.10695716761574647, + "grad_norm": 5.309369750393833, + "learning_rate": 2.9536928496087016e-06, + "loss": 0.6407, + "step": 2592 + }, + { + "epoch": 0.1069984319551044, + "grad_norm": 4.871593986681212, + "learning_rate": 2.9536434080795947e-06, + "loss": 0.6302, + "step": 2593 + }, + { + "epoch": 0.10703969629446232, + "grad_norm": 9.478768375292864, + "learning_rate": 2.9535939405847797e-06, + "loss": 0.5145, + "step": 2594 + }, + { + "epoch": 0.10708096063382025, + "grad_norm": 3.454528372817777, + "learning_rate": 2.953544447125141e-06, + "loss": 0.477, + "step": 2595 + }, + { + "epoch": 0.10712222497317818, + "grad_norm": 4.847727199394969, + "learning_rate": 2.9534949277015615e-06, + "loss": 0.5892, + "step": 2596 + }, + { + "epoch": 0.10716348931253611, + "grad_norm": 3.5536336982856165, + "learning_rate": 2.9534453823149273e-06, + "loss": 0.5813, + "step": 2597 + }, + { + "epoch": 0.10720475365189404, + "grad_norm": 5.767613525713308, + "learning_rate": 2.953395810966122e-06, + "loss": 0.5869, + "step": 2598 + }, + { + "epoch": 0.10724601799125195, + "grad_norm": 2.9423077746641906, + "learning_rate": 2.9533462136560317e-06, + "loss": 0.5672, + "step": 2599 + }, + { + "epoch": 0.10728728233060988, + "grad_norm": 3.7369546590589775, + "learning_rate": 2.953296590385543e-06, + "loss": 0.5775, + "step": 2600 + }, + { + "epoch": 0.10732854666996781, + "grad_norm": 3.0415912536691763, + "learning_rate": 2.9532469411555414e-06, + "loss": 0.5192, + "step": 2601 + }, + { + "epoch": 0.10736981100932574, + "grad_norm": 2.7468150418851844, + "learning_rate": 2.953197265966913e-06, + "loss": 0.5501, + "step": 2602 + }, + { + "epoch": 0.10741107534868367, + "grad_norm": 3.5794759907024445, + "learning_rate": 2.9531475648205474e-06, + "loss": 0.6256, + "step": 2603 + }, + { + "epoch": 0.10745233968804159, + "grad_norm": 2.682948871690893, + "learning_rate": 2.9530978377173304e-06, + "loss": 0.5191, + "step": 2604 + }, + { + "epoch": 0.10749360402739952, + "grad_norm": 13.185767977945392, + "learning_rate": 2.9530480846581512e-06, + "loss": 0.571, + "step": 2605 + }, + { + "epoch": 0.10753486836675745, + "grad_norm": 7.786583066368358, + "learning_rate": 2.952998305643898e-06, + "loss": 0.5816, + "step": 2606 + }, + { + "epoch": 0.10757613270611538, + "grad_norm": 15.036230907606733, + "learning_rate": 2.95294850067546e-06, + "loss": 0.529, + "step": 2607 + }, + { + "epoch": 0.1076173970454733, + "grad_norm": 3.2084503309072225, + "learning_rate": 2.9528986697537275e-06, + "loss": 0.597, + "step": 2608 + }, + { + "epoch": 0.10765866138483123, + "grad_norm": 5.43738938042503, + "learning_rate": 2.95284881287959e-06, + "loss": 0.5618, + "step": 2609 + }, + { + "epoch": 0.10769992572418915, + "grad_norm": 3.993787183469047, + "learning_rate": 2.9527989300539383e-06, + "loss": 0.6144, + "step": 2610 + }, + { + "epoch": 0.10774119006354708, + "grad_norm": 7.0043762771783395, + "learning_rate": 2.9527490212776633e-06, + "loss": 0.6081, + "step": 2611 + }, + { + "epoch": 0.10778245440290501, + "grad_norm": 3.882118696328211, + "learning_rate": 2.9526990865516564e-06, + "loss": 0.5853, + "step": 2612 + }, + { + "epoch": 0.10782371874226293, + "grad_norm": 3.8808299924307095, + "learning_rate": 2.9526491258768097e-06, + "loss": 0.6009, + "step": 2613 + }, + { + "epoch": 
0.10786498308162086, + "grad_norm": 2.8343555388947173, + "learning_rate": 2.952599139254016e-06, + "loss": 0.5533, + "step": 2614 + }, + { + "epoch": 0.10790624742097879, + "grad_norm": 6.032494794817979, + "learning_rate": 2.952549126684168e-06, + "loss": 0.6243, + "step": 2615 + }, + { + "epoch": 0.10794751176033672, + "grad_norm": 4.044806954522367, + "learning_rate": 2.952499088168158e-06, + "loss": 0.5694, + "step": 2616 + }, + { + "epoch": 0.10798877609969465, + "grad_norm": 3.665131498061333, + "learning_rate": 2.952449023706881e-06, + "loss": 0.6078, + "step": 2617 + }, + { + "epoch": 0.10803004043905257, + "grad_norm": 3.2261056347905512, + "learning_rate": 2.9523989333012313e-06, + "loss": 0.5778, + "step": 2618 + }, + { + "epoch": 0.1080713047784105, + "grad_norm": 6.026921900469985, + "learning_rate": 2.9523488169521033e-06, + "loss": 0.5433, + "step": 2619 + }, + { + "epoch": 0.10811256911776843, + "grad_norm": 5.213796182417341, + "learning_rate": 2.9522986746603924e-06, + "loss": 0.5816, + "step": 2620 + }, + { + "epoch": 0.10815383345712636, + "grad_norm": 4.697418585846306, + "learning_rate": 2.952248506426994e-06, + "loss": 0.5771, + "step": 2621 + }, + { + "epoch": 0.10819509779648427, + "grad_norm": 2.7150037328785315, + "learning_rate": 2.952198312252804e-06, + "loss": 0.5505, + "step": 2622 + }, + { + "epoch": 0.1082363621358422, + "grad_norm": 4.901425408848543, + "learning_rate": 2.952148092138719e-06, + "loss": 0.5683, + "step": 2623 + }, + { + "epoch": 0.10827762647520013, + "grad_norm": 6.04333731406258, + "learning_rate": 2.952097846085637e-06, + "loss": 0.5582, + "step": 2624 + }, + { + "epoch": 0.10831889081455806, + "grad_norm": 6.360902849991473, + "learning_rate": 2.952047574094454e-06, + "loss": 0.6133, + "step": 2625 + }, + { + "epoch": 0.10836015515391599, + "grad_norm": 7.755213482817037, + "learning_rate": 2.9519972761660694e-06, + "loss": 0.5886, + "step": 2626 + }, + { + "epoch": 0.1084014194932739, + "grad_norm": 3.0431872381580023, + "learning_rate": 2.9519469523013816e-06, + "loss": 0.5604, + "step": 2627 + }, + { + "epoch": 0.10844268383263184, + "grad_norm": 4.540231403274339, + "learning_rate": 2.9518966025012883e-06, + "loss": 0.5686, + "step": 2628 + }, + { + "epoch": 0.10848394817198977, + "grad_norm": 4.490614014692348, + "learning_rate": 2.9518462267666898e-06, + "loss": 0.5862, + "step": 2629 + }, + { + "epoch": 0.1085252125113477, + "grad_norm": 4.300472371497213, + "learning_rate": 2.951795825098486e-06, + "loss": 0.5886, + "step": 2630 + }, + { + "epoch": 0.10856647685070563, + "grad_norm": 6.062029039543245, + "learning_rate": 2.9517453974975767e-06, + "loss": 0.6247, + "step": 2631 + }, + { + "epoch": 0.10860774119006354, + "grad_norm": 5.7145037587480125, + "learning_rate": 2.9516949439648633e-06, + "loss": 0.5652, + "step": 2632 + }, + { + "epoch": 0.10864900552942147, + "grad_norm": 9.262563328119068, + "learning_rate": 2.951644464501246e-06, + "loss": 0.6384, + "step": 2633 + }, + { + "epoch": 0.1086902698687794, + "grad_norm": 3.581940073244177, + "learning_rate": 2.9515939591076274e-06, + "loss": 0.6161, + "step": 2634 + }, + { + "epoch": 0.10873153420813733, + "grad_norm": 3.4787758388959595, + "learning_rate": 2.951543427784909e-06, + "loss": 0.5252, + "step": 2635 + }, + { + "epoch": 0.10877279854749526, + "grad_norm": 4.836218553376087, + "learning_rate": 2.9514928705339943e-06, + "loss": 0.5708, + "step": 2636 + }, + { + "epoch": 0.10881406288685318, + "grad_norm": 10.326257814782242, + "learning_rate": 
2.9514422873557856e-06, + "loss": 0.53, + "step": 2637 + }, + { + "epoch": 0.10885532722621111, + "grad_norm": 7.920943749921398, + "learning_rate": 2.9513916782511867e-06, + "loss": 0.6317, + "step": 2638 + }, + { + "epoch": 0.10889659156556904, + "grad_norm": 3.7548293328958438, + "learning_rate": 2.9513410432211018e-06, + "loss": 0.5422, + "step": 2639 + }, + { + "epoch": 0.10893785590492697, + "grad_norm": 10.818336285077576, + "learning_rate": 2.9512903822664347e-06, + "loss": 0.6172, + "step": 2640 + }, + { + "epoch": 0.10897912024428488, + "grad_norm": 29.518659557895408, + "learning_rate": 2.9512396953880913e-06, + "loss": 0.5714, + "step": 2641 + }, + { + "epoch": 0.10902038458364281, + "grad_norm": 3.050316069115292, + "learning_rate": 2.9511889825869763e-06, + "loss": 0.6243, + "step": 2642 + }, + { + "epoch": 0.10906164892300074, + "grad_norm": 5.968698096947627, + "learning_rate": 2.951138243863996e-06, + "loss": 0.5376, + "step": 2643 + }, + { + "epoch": 0.10910291326235867, + "grad_norm": 6.693820866466599, + "learning_rate": 2.9510874792200558e-06, + "loss": 0.6601, + "step": 2644 + }, + { + "epoch": 0.1091441776017166, + "grad_norm": 4.80412760109827, + "learning_rate": 2.9510366886560637e-06, + "loss": 0.5443, + "step": 2645 + }, + { + "epoch": 0.10918544194107452, + "grad_norm": 4.192263013265996, + "learning_rate": 2.9509858721729262e-06, + "loss": 0.5711, + "step": 2646 + }, + { + "epoch": 0.10922670628043245, + "grad_norm": 2.9215978509778795, + "learning_rate": 2.950935029771551e-06, + "loss": 0.5544, + "step": 2647 + }, + { + "epoch": 0.10926797061979038, + "grad_norm": 2.862821379229324, + "learning_rate": 2.950884161452847e-06, + "loss": 0.66, + "step": 2648 + }, + { + "epoch": 0.10930923495914831, + "grad_norm": 4.10008880562081, + "learning_rate": 2.950833267217722e-06, + "loss": 0.5186, + "step": 2649 + }, + { + "epoch": 0.10935049929850624, + "grad_norm": 18.540877675975644, + "learning_rate": 2.9507823470670856e-06, + "loss": 0.5768, + "step": 2650 + }, + { + "epoch": 0.10939176363786415, + "grad_norm": 2.814749469820161, + "learning_rate": 2.9507314010018472e-06, + "loss": 0.5492, + "step": 2651 + }, + { + "epoch": 0.10943302797722208, + "grad_norm": 2.6912840279442056, + "learning_rate": 2.9506804290229165e-06, + "loss": 0.5839, + "step": 2652 + }, + { + "epoch": 0.10947429231658001, + "grad_norm": 2.6937823345161784, + "learning_rate": 2.9506294311312044e-06, + "loss": 0.6304, + "step": 2653 + }, + { + "epoch": 0.10951555665593794, + "grad_norm": 3.062620408094541, + "learning_rate": 2.950578407327622e-06, + "loss": 0.5692, + "step": 2654 + }, + { + "epoch": 0.10955682099529586, + "grad_norm": 2.9853819490126856, + "learning_rate": 2.9505273576130803e-06, + "loss": 0.5508, + "step": 2655 + }, + { + "epoch": 0.10959808533465379, + "grad_norm": 4.403630406043513, + "learning_rate": 2.9504762819884915e-06, + "loss": 0.576, + "step": 2656 + }, + { + "epoch": 0.10963934967401172, + "grad_norm": 8.453998346076876, + "learning_rate": 2.9504251804547674e-06, + "loss": 0.5862, + "step": 2657 + }, + { + "epoch": 0.10968061401336965, + "grad_norm": 5.920330901872824, + "learning_rate": 2.9503740530128215e-06, + "loss": 0.5524, + "step": 2658 + }, + { + "epoch": 0.10972187835272758, + "grad_norm": 2.811950087358957, + "learning_rate": 2.950322899663566e-06, + "loss": 0.537, + "step": 2659 + }, + { + "epoch": 0.1097631426920855, + "grad_norm": 6.686841436515477, + "learning_rate": 2.950271720407916e-06, + "loss": 0.5323, + "step": 2660 + }, + { + "epoch": 
0.10980440703144342, + "grad_norm": 3.2312117770175894, + "learning_rate": 2.9502205152467857e-06, + "loss": 0.5376, + "step": 2661 + }, + { + "epoch": 0.10984567137080135, + "grad_norm": 4.75551600121082, + "learning_rate": 2.9501692841810877e-06, + "loss": 0.5785, + "step": 2662 + }, + { + "epoch": 0.10988693571015928, + "grad_norm": 3.8930334102136124, + "learning_rate": 2.95011802721174e-06, + "loss": 0.6045, + "step": 2663 + }, + { + "epoch": 0.10992820004951721, + "grad_norm": 3.463593067255604, + "learning_rate": 2.9500667443396555e-06, + "loss": 0.5739, + "step": 2664 + }, + { + "epoch": 0.10996946438887513, + "grad_norm": 2.6519720811284713, + "learning_rate": 2.950015435565752e-06, + "loss": 0.6016, + "step": 2665 + }, + { + "epoch": 0.11001072872823306, + "grad_norm": 5.422818196614996, + "learning_rate": 2.9499641008909448e-06, + "loss": 0.5717, + "step": 2666 + }, + { + "epoch": 0.11005199306759099, + "grad_norm": 3.1218654595356417, + "learning_rate": 2.9499127403161525e-06, + "loss": 0.5707, + "step": 2667 + }, + { + "epoch": 0.11009325740694892, + "grad_norm": 14.084793222222606, + "learning_rate": 2.949861353842291e-06, + "loss": 0.533, + "step": 2668 + }, + { + "epoch": 0.11013452174630683, + "grad_norm": 6.094817234443564, + "learning_rate": 2.9498099414702787e-06, + "loss": 0.5516, + "step": 2669 + }, + { + "epoch": 0.11017578608566476, + "grad_norm": 3.960777793722065, + "learning_rate": 2.949758503201034e-06, + "loss": 0.5503, + "step": 2670 + }, + { + "epoch": 0.1102170504250227, + "grad_norm": 3.321474317346649, + "learning_rate": 2.9497070390354756e-06, + "loss": 0.5863, + "step": 2671 + }, + { + "epoch": 0.11025831476438062, + "grad_norm": 4.106064955494375, + "learning_rate": 2.949655548974523e-06, + "loss": 0.5798, + "step": 2672 + }, + { + "epoch": 0.11029957910373855, + "grad_norm": 13.445153365763097, + "learning_rate": 2.9496040330190957e-06, + "loss": 0.5474, + "step": 2673 + }, + { + "epoch": 0.11034084344309647, + "grad_norm": 3.910807980746304, + "learning_rate": 2.949552491170114e-06, + "loss": 0.5964, + "step": 2674 + }, + { + "epoch": 0.1103821077824544, + "grad_norm": 2.64717863746778, + "learning_rate": 2.9495009234284987e-06, + "loss": 0.5736, + "step": 2675 + }, + { + "epoch": 0.11042337212181233, + "grad_norm": 3.035226527072817, + "learning_rate": 2.949449329795171e-06, + "loss": 0.552, + "step": 2676 + }, + { + "epoch": 0.11046463646117026, + "grad_norm": 3.9930191969537265, + "learning_rate": 2.949397710271052e-06, + "loss": 0.5298, + "step": 2677 + }, + { + "epoch": 0.11050590080052819, + "grad_norm": 7.4017416524414354, + "learning_rate": 2.9493460648570636e-06, + "loss": 0.5184, + "step": 2678 + }, + { + "epoch": 0.1105471651398861, + "grad_norm": 4.719044100954653, + "learning_rate": 2.94929439355413e-06, + "loss": 0.536, + "step": 2679 + }, + { + "epoch": 0.11058842947924404, + "grad_norm": 5.052263098378739, + "learning_rate": 2.949242696363172e-06, + "loss": 0.5546, + "step": 2680 + }, + { + "epoch": 0.11062969381860197, + "grad_norm": 4.579981516200151, + "learning_rate": 2.949190973285114e-06, + "loss": 0.5256, + "step": 2681 + }, + { + "epoch": 0.1106709581579599, + "grad_norm": 2.8321443873411196, + "learning_rate": 2.9491392243208805e-06, + "loss": 0.5364, + "step": 2682 + }, + { + "epoch": 0.11071222249731782, + "grad_norm": 5.645644724640739, + "learning_rate": 2.9490874494713945e-06, + "loss": 0.561, + "step": 2683 + }, + { + "epoch": 0.11075348683667574, + "grad_norm": 1.9284594612270105, + "learning_rate": 
2.949035648737582e-06, + "loss": 0.5309, + "step": 2684 + }, + { + "epoch": 0.11079475117603367, + "grad_norm": 8.793172495291039, + "learning_rate": 2.9489838221203685e-06, + "loss": 0.5964, + "step": 2685 + }, + { + "epoch": 0.1108360155153916, + "grad_norm": 221.09661648457822, + "learning_rate": 2.9489319696206783e-06, + "loss": 0.6354, + "step": 2686 + }, + { + "epoch": 0.11087727985474953, + "grad_norm": 5.352949185415113, + "learning_rate": 2.948880091239439e-06, + "loss": 0.6234, + "step": 2687 + }, + { + "epoch": 0.11091854419410745, + "grad_norm": 3.1088888145267757, + "learning_rate": 2.948828186977576e-06, + "loss": 0.5527, + "step": 2688 + }, + { + "epoch": 0.11095980853346538, + "grad_norm": 5.991684354367466, + "learning_rate": 2.9487762568360176e-06, + "loss": 0.531, + "step": 2689 + }, + { + "epoch": 0.1110010728728233, + "grad_norm": 12.061663447544595, + "learning_rate": 2.948724300815691e-06, + "loss": 0.6293, + "step": 2690 + }, + { + "epoch": 0.11104233721218124, + "grad_norm": 3.956659061670187, + "learning_rate": 2.9486723189175243e-06, + "loss": 0.5984, + "step": 2691 + }, + { + "epoch": 0.11108360155153917, + "grad_norm": 7.171945401512702, + "learning_rate": 2.9486203111424456e-06, + "loss": 0.5768, + "step": 2692 + }, + { + "epoch": 0.11112486589089708, + "grad_norm": 5.633009196414145, + "learning_rate": 2.9485682774913846e-06, + "loss": 0.5609, + "step": 2693 + }, + { + "epoch": 0.11116613023025501, + "grad_norm": 5.709318560877436, + "learning_rate": 2.9485162179652704e-06, + "loss": 0.5707, + "step": 2694 + }, + { + "epoch": 0.11120739456961294, + "grad_norm": 3.4388156724113523, + "learning_rate": 2.948464132565033e-06, + "loss": 0.6297, + "step": 2695 + }, + { + "epoch": 0.11124865890897087, + "grad_norm": 3.794604283089568, + "learning_rate": 2.9484120212916024e-06, + "loss": 0.5515, + "step": 2696 + }, + { + "epoch": 0.1112899232483288, + "grad_norm": 9.363254639817537, + "learning_rate": 2.9483598841459097e-06, + "loss": 0.5782, + "step": 2697 + }, + { + "epoch": 0.11133118758768672, + "grad_norm": 3.5376776007750195, + "learning_rate": 2.9483077211288864e-06, + "loss": 0.5427, + "step": 2698 + }, + { + "epoch": 0.11137245192704465, + "grad_norm": 4.296873495053997, + "learning_rate": 2.948255532241464e-06, + "loss": 0.5176, + "step": 2699 + }, + { + "epoch": 0.11141371626640258, + "grad_norm": 4.640062123082645, + "learning_rate": 2.948203317484575e-06, + "loss": 0.6533, + "step": 2700 + }, + { + "epoch": 0.1114549806057605, + "grad_norm": 25.663511829647746, + "learning_rate": 2.948151076859152e-06, + "loss": 0.6227, + "step": 2701 + }, + { + "epoch": 0.11149624494511842, + "grad_norm": 5.630607680834061, + "learning_rate": 2.9480988103661278e-06, + "loss": 0.5592, + "step": 2702 + }, + { + "epoch": 0.11153750928447635, + "grad_norm": 3.931718918445299, + "learning_rate": 2.948046518006436e-06, + "loss": 0.5789, + "step": 2703 + }, + { + "epoch": 0.11157877362383428, + "grad_norm": 13.05607275044027, + "learning_rate": 2.947994199781011e-06, + "loss": 0.5191, + "step": 2704 + }, + { + "epoch": 0.11162003796319221, + "grad_norm": 4.834672027270376, + "learning_rate": 2.9479418556907876e-06, + "loss": 0.6201, + "step": 2705 + }, + { + "epoch": 0.11166130230255014, + "grad_norm": 2.787011256623559, + "learning_rate": 2.9478894857367006e-06, + "loss": 0.506, + "step": 2706 + }, + { + "epoch": 0.11170256664190806, + "grad_norm": 3.124385208139324, + "learning_rate": 2.947837089919685e-06, + "loss": 0.5925, + "step": 2707 + }, + { + "epoch": 
0.11174383098126599, + "grad_norm": 3.6312245662364218, + "learning_rate": 2.947784668240677e-06, + "loss": 0.586, + "step": 2708 + }, + { + "epoch": 0.11178509532062392, + "grad_norm": 4.18826325211283, + "learning_rate": 2.947732220700613e-06, + "loss": 0.5547, + "step": 2709 + }, + { + "epoch": 0.11182635965998185, + "grad_norm": 5.61093587614441, + "learning_rate": 2.94767974730043e-06, + "loss": 0.5312, + "step": 2710 + }, + { + "epoch": 0.11186762399933978, + "grad_norm": 3.443314227246671, + "learning_rate": 2.947627248041065e-06, + "loss": 0.6122, + "step": 2711 + }, + { + "epoch": 0.11190888833869769, + "grad_norm": 5.557741964889133, + "learning_rate": 2.947574722923456e-06, + "loss": 0.6195, + "step": 2712 + }, + { + "epoch": 0.11195015267805562, + "grad_norm": 9.270831105217244, + "learning_rate": 2.9475221719485416e-06, + "loss": 0.6024, + "step": 2713 + }, + { + "epoch": 0.11199141701741355, + "grad_norm": 10.880142288541856, + "learning_rate": 2.9474695951172595e-06, + "loss": 0.5984, + "step": 2714 + }, + { + "epoch": 0.11203268135677148, + "grad_norm": 3.0371064760465556, + "learning_rate": 2.94741699243055e-06, + "loss": 0.5522, + "step": 2715 + }, + { + "epoch": 0.1120739456961294, + "grad_norm": 4.873359361813788, + "learning_rate": 2.9473643638893515e-06, + "loss": 0.5914, + "step": 2716 + }, + { + "epoch": 0.11211521003548733, + "grad_norm": 65.15841375849449, + "learning_rate": 2.947311709494605e-06, + "loss": 0.5706, + "step": 2717 + }, + { + "epoch": 0.11215647437484526, + "grad_norm": 3.5982543556477284, + "learning_rate": 2.9472590292472507e-06, + "loss": 0.5942, + "step": 2718 + }, + { + "epoch": 0.11219773871420319, + "grad_norm": 2.6082363396591326, + "learning_rate": 2.9472063231482296e-06, + "loss": 0.546, + "step": 2719 + }, + { + "epoch": 0.11223900305356112, + "grad_norm": 5.118100190023344, + "learning_rate": 2.9471535911984834e-06, + "loss": 0.5584, + "step": 2720 + }, + { + "epoch": 0.11228026739291903, + "grad_norm": 4.34352668795643, + "learning_rate": 2.947100833398954e-06, + "loss": 0.5701, + "step": 2721 + }, + { + "epoch": 0.11232153173227696, + "grad_norm": 3.547058568337212, + "learning_rate": 2.947048049750583e-06, + "loss": 0.564, + "step": 2722 + }, + { + "epoch": 0.1123627960716349, + "grad_norm": 3.967448874763688, + "learning_rate": 2.9469952402543144e-06, + "loss": 0.4996, + "step": 2723 + }, + { + "epoch": 0.11240406041099282, + "grad_norm": 2.981869086999141, + "learning_rate": 2.946942404911091e-06, + "loss": 0.5392, + "step": 2724 + }, + { + "epoch": 0.11244532475035075, + "grad_norm": 4.351261953753251, + "learning_rate": 2.946889543721856e-06, + "loss": 0.5184, + "step": 2725 + }, + { + "epoch": 0.11248658908970867, + "grad_norm": 21.170604084919646, + "learning_rate": 2.9468366566875547e-06, + "loss": 0.5589, + "step": 2726 + }, + { + "epoch": 0.1125278534290666, + "grad_norm": 4.485745688233924, + "learning_rate": 2.946783743809131e-06, + "loss": 0.5676, + "step": 2727 + }, + { + "epoch": 0.11256911776842453, + "grad_norm": 4.697641823017379, + "learning_rate": 2.9467308050875306e-06, + "loss": 0.6052, + "step": 2728 + }, + { + "epoch": 0.11261038210778246, + "grad_norm": 4.9445552421948165, + "learning_rate": 2.9466778405236986e-06, + "loss": 0.5501, + "step": 2729 + }, + { + "epoch": 0.11265164644714037, + "grad_norm": 5.840883673442123, + "learning_rate": 2.9466248501185817e-06, + "loss": 0.5983, + "step": 2730 + }, + { + "epoch": 0.1126929107864983, + "grad_norm": 2.8645880520620093, + "learning_rate": 2.946571833873126e-06, 
+ "loss": 0.5306, + "step": 2731 + }, + { + "epoch": 0.11273417512585623, + "grad_norm": 3.3718958534871946, + "learning_rate": 2.9465187917882782e-06, + "loss": 0.5295, + "step": 2732 + }, + { + "epoch": 0.11277543946521416, + "grad_norm": 3.8123802428442373, + "learning_rate": 2.946465723864986e-06, + "loss": 0.6139, + "step": 2733 + }, + { + "epoch": 0.1128167038045721, + "grad_norm": 6.961267137216574, + "learning_rate": 2.946412630104198e-06, + "loss": 0.5328, + "step": 2734 + }, + { + "epoch": 0.11285796814393001, + "grad_norm": 15.7316439083673, + "learning_rate": 2.9463595105068617e-06, + "loss": 0.5326, + "step": 2735 + }, + { + "epoch": 0.11289923248328794, + "grad_norm": 2.741498414769772, + "learning_rate": 2.9463063650739267e-06, + "loss": 0.6013, + "step": 2736 + }, + { + "epoch": 0.11294049682264587, + "grad_norm": 6.963332851339552, + "learning_rate": 2.946253193806342e-06, + "loss": 0.5213, + "step": 2737 + }, + { + "epoch": 0.1129817611620038, + "grad_norm": 26.816265585476145, + "learning_rate": 2.9461999967050565e-06, + "loss": 0.5505, + "step": 2738 + }, + { + "epoch": 0.11302302550136173, + "grad_norm": 10.953922454642065, + "learning_rate": 2.9461467737710217e-06, + "loss": 0.5937, + "step": 2739 + }, + { + "epoch": 0.11306428984071965, + "grad_norm": 2.6310673216099056, + "learning_rate": 2.946093525005188e-06, + "loss": 0.5572, + "step": 2740 + }, + { + "epoch": 0.11310555418007757, + "grad_norm": 4.646848142350671, + "learning_rate": 2.946040250408506e-06, + "loss": 0.6038, + "step": 2741 + }, + { + "epoch": 0.1131468185194355, + "grad_norm": 3.7063468608734045, + "learning_rate": 2.945986949981928e-06, + "loss": 0.5858, + "step": 2742 + }, + { + "epoch": 0.11318808285879343, + "grad_norm": 3.6224156816312156, + "learning_rate": 2.945933623726406e-06, + "loss": 0.5752, + "step": 2743 + }, + { + "epoch": 0.11322934719815136, + "grad_norm": 4.66597115766867, + "learning_rate": 2.945880271642892e-06, + "loss": 0.5826, + "step": 2744 + }, + { + "epoch": 0.11327061153750928, + "grad_norm": 5.081227771671317, + "learning_rate": 2.945826893732339e-06, + "loss": 0.5986, + "step": 2745 + }, + { + "epoch": 0.11331187587686721, + "grad_norm": 4.399954577614027, + "learning_rate": 2.945773489995701e-06, + "loss": 0.549, + "step": 2746 + }, + { + "epoch": 0.11335314021622514, + "grad_norm": 3.442434380949336, + "learning_rate": 2.9457200604339315e-06, + "loss": 0.6033, + "step": 2747 + }, + { + "epoch": 0.11339440455558307, + "grad_norm": 2.4933060204769086, + "learning_rate": 2.9456666050479856e-06, + "loss": 0.5448, + "step": 2748 + }, + { + "epoch": 0.11343566889494099, + "grad_norm": 7.81945802459763, + "learning_rate": 2.9456131238388172e-06, + "loss": 0.529, + "step": 2749 + }, + { + "epoch": 0.11347693323429892, + "grad_norm": 3.6579828804472037, + "learning_rate": 2.9455596168073823e-06, + "loss": 0.5687, + "step": 2750 + }, + { + "epoch": 0.11351819757365685, + "grad_norm": 3.092365669568379, + "learning_rate": 2.9455060839546364e-06, + "loss": 0.5798, + "step": 2751 + }, + { + "epoch": 0.11355946191301478, + "grad_norm": 4.992082458463891, + "learning_rate": 2.9454525252815356e-06, + "loss": 0.6256, + "step": 2752 + }, + { + "epoch": 0.1136007262523727, + "grad_norm": 3.2033142099797542, + "learning_rate": 2.9453989407890364e-06, + "loss": 0.5724, + "step": 2753 + }, + { + "epoch": 0.11364199059173062, + "grad_norm": 3.1293071734291997, + "learning_rate": 2.945345330478097e-06, + "loss": 0.5571, + "step": 2754 + }, + { + "epoch": 0.11368325493108855, + "grad_norm": 
10.065149590984289, + "learning_rate": 2.945291694349674e-06, + "loss": 0.6017, + "step": 2755 + }, + { + "epoch": 0.11372451927044648, + "grad_norm": 3.8429289425098627, + "learning_rate": 2.9452380324047256e-06, + "loss": 0.5591, + "step": 2756 + }, + { + "epoch": 0.11376578360980441, + "grad_norm": 4.054830582045594, + "learning_rate": 2.9451843446442106e-06, + "loss": 0.5707, + "step": 2757 + }, + { + "epoch": 0.11380704794916234, + "grad_norm": 3.689915994067349, + "learning_rate": 2.945130631069088e-06, + "loss": 0.6232, + "step": 2758 + }, + { + "epoch": 0.11384831228852026, + "grad_norm": 3.1855048363373597, + "learning_rate": 2.945076891680317e-06, + "loss": 0.5419, + "step": 2759 + }, + { + "epoch": 0.11388957662787819, + "grad_norm": 7.274625527901669, + "learning_rate": 2.9450231264788582e-06, + "loss": 0.5725, + "step": 2760 + }, + { + "epoch": 0.11393084096723612, + "grad_norm": 2.5666025636808802, + "learning_rate": 2.9449693354656708e-06, + "loss": 0.5095, + "step": 2761 + }, + { + "epoch": 0.11397210530659405, + "grad_norm": 3.7106604549451254, + "learning_rate": 2.9449155186417165e-06, + "loss": 0.6224, + "step": 2762 + }, + { + "epoch": 0.11401336964595196, + "grad_norm": 7.497127783028766, + "learning_rate": 2.9448616760079565e-06, + "loss": 0.4773, + "step": 2763 + }, + { + "epoch": 0.11405463398530989, + "grad_norm": 2.5615575080181063, + "learning_rate": 2.9448078075653524e-06, + "loss": 0.5955, + "step": 2764 + }, + { + "epoch": 0.11409589832466782, + "grad_norm": 8.04654086344676, + "learning_rate": 2.9447539133148665e-06, + "loss": 0.5862, + "step": 2765 + }, + { + "epoch": 0.11413716266402575, + "grad_norm": 8.041448237969608, + "learning_rate": 2.944699993257462e-06, + "loss": 0.5893, + "step": 2766 + }, + { + "epoch": 0.11417842700338368, + "grad_norm": 4.097233293901653, + "learning_rate": 2.9446460473941007e-06, + "loss": 0.5863, + "step": 2767 + }, + { + "epoch": 0.1142196913427416, + "grad_norm": 4.336720252451565, + "learning_rate": 2.9445920757257475e-06, + "loss": 0.5734, + "step": 2768 + }, + { + "epoch": 0.11426095568209953, + "grad_norm": 6.034949899456396, + "learning_rate": 2.944538078253366e-06, + "loss": 0.604, + "step": 2769 + }, + { + "epoch": 0.11430222002145746, + "grad_norm": 3.8887487502004996, + "learning_rate": 2.9444840549779206e-06, + "loss": 0.5817, + "step": 2770 + }, + { + "epoch": 0.11434348436081539, + "grad_norm": 4.781485616599006, + "learning_rate": 2.9444300059003766e-06, + "loss": 0.6451, + "step": 2771 + }, + { + "epoch": 0.11438474870017332, + "grad_norm": 3.4263151782289607, + "learning_rate": 2.944375931021699e-06, + "loss": 0.5895, + "step": 2772 + }, + { + "epoch": 0.11442601303953123, + "grad_norm": 12.375501785814908, + "learning_rate": 2.9443218303428545e-06, + "loss": 0.5721, + "step": 2773 + }, + { + "epoch": 0.11446727737888916, + "grad_norm": 3.74517750903601, + "learning_rate": 2.9442677038648085e-06, + "loss": 0.538, + "step": 2774 + }, + { + "epoch": 0.11450854171824709, + "grad_norm": 4.708643061906765, + "learning_rate": 2.944213551588528e-06, + "loss": 0.5865, + "step": 2775 + }, + { + "epoch": 0.11454980605760502, + "grad_norm": 3.1375261369249228, + "learning_rate": 2.9441593735149812e-06, + "loss": 0.578, + "step": 2776 + }, + { + "epoch": 0.11459107039696294, + "grad_norm": 2.896000074080242, + "learning_rate": 2.9441051696451354e-06, + "loss": 0.5843, + "step": 2777 + }, + { + "epoch": 0.11463233473632087, + "grad_norm": 3.602277264524284, + "learning_rate": 2.9440509399799586e-06, + "loss": 0.5805, + 
"step": 2778 + }, + { + "epoch": 0.1146735990756788, + "grad_norm": 2.5063397690389495, + "learning_rate": 2.943996684520419e-06, + "loss": 0.5523, + "step": 2779 + }, + { + "epoch": 0.11471486341503673, + "grad_norm": 3.139145754816793, + "learning_rate": 2.9439424032674867e-06, + "loss": 0.5635, + "step": 2780 + }, + { + "epoch": 0.11475612775439466, + "grad_norm": 3.0816841022190093, + "learning_rate": 2.943888096222131e-06, + "loss": 0.5774, + "step": 2781 + }, + { + "epoch": 0.11479739209375257, + "grad_norm": 3.3748997415759643, + "learning_rate": 2.9438337633853215e-06, + "loss": 0.5785, + "step": 2782 + }, + { + "epoch": 0.1148386564331105, + "grad_norm": 12.589522620363292, + "learning_rate": 2.943779404758029e-06, + "loss": 0.5398, + "step": 2783 + }, + { + "epoch": 0.11487992077246843, + "grad_norm": 5.412731125946049, + "learning_rate": 2.943725020341225e-06, + "loss": 0.6009, + "step": 2784 + }, + { + "epoch": 0.11492118511182636, + "grad_norm": 2.3873469664218296, + "learning_rate": 2.9436706101358804e-06, + "loss": 0.5326, + "step": 2785 + }, + { + "epoch": 0.11496244945118429, + "grad_norm": 2.295907951721444, + "learning_rate": 2.943616174142967e-06, + "loss": 0.5967, + "step": 2786 + }, + { + "epoch": 0.11500371379054221, + "grad_norm": 6.06480207757081, + "learning_rate": 2.943561712363457e-06, + "loss": 0.5476, + "step": 2787 + }, + { + "epoch": 0.11504497812990014, + "grad_norm": 3.679798042822989, + "learning_rate": 2.943507224798324e-06, + "loss": 0.5968, + "step": 2788 + }, + { + "epoch": 0.11508624246925807, + "grad_norm": 7.1106223742233565, + "learning_rate": 2.943452711448541e-06, + "loss": 0.607, + "step": 2789 + }, + { + "epoch": 0.115127506808616, + "grad_norm": 3.4085155598663777, + "learning_rate": 2.9433981723150816e-06, + "loss": 0.5364, + "step": 2790 + }, + { + "epoch": 0.11516877114797391, + "grad_norm": 3.0617088126284173, + "learning_rate": 2.9433436073989196e-06, + "loss": 0.5562, + "step": 2791 + }, + { + "epoch": 0.11521003548733184, + "grad_norm": 3.2559289651350376, + "learning_rate": 2.9432890167010305e-06, + "loss": 0.5651, + "step": 2792 + }, + { + "epoch": 0.11525129982668977, + "grad_norm": 3.717513148371589, + "learning_rate": 2.943234400222389e-06, + "loss": 0.5465, + "step": 2793 + }, + { + "epoch": 0.1152925641660477, + "grad_norm": 4.6709994670277775, + "learning_rate": 2.9431797579639703e-06, + "loss": 0.616, + "step": 2794 + }, + { + "epoch": 0.11533382850540563, + "grad_norm": 4.913632320187799, + "learning_rate": 2.943125089926751e-06, + "loss": 0.5783, + "step": 2795 + }, + { + "epoch": 0.11537509284476355, + "grad_norm": 2.8504439674935487, + "learning_rate": 2.943070396111708e-06, + "loss": 0.5675, + "step": 2796 + }, + { + "epoch": 0.11541635718412148, + "grad_norm": 5.991602221341902, + "learning_rate": 2.9430156765198175e-06, + "loss": 0.545, + "step": 2797 + }, + { + "epoch": 0.11545762152347941, + "grad_norm": 3.338614417059268, + "learning_rate": 2.9429609311520567e-06, + "loss": 0.5322, + "step": 2798 + }, + { + "epoch": 0.11549888586283734, + "grad_norm": 4.033551029015933, + "learning_rate": 2.942906160009404e-06, + "loss": 0.6346, + "step": 2799 + }, + { + "epoch": 0.11554015020219527, + "grad_norm": 4.977062799315068, + "learning_rate": 2.942851363092838e-06, + "loss": 0.5881, + "step": 2800 + }, + { + "epoch": 0.11558141454155318, + "grad_norm": 5.3698437289748915, + "learning_rate": 2.9427965404033365e-06, + "loss": 0.5881, + "step": 2801 + }, + { + "epoch": 0.11562267888091111, + "grad_norm": 2.2876669367434737, + 
"learning_rate": 2.9427416919418802e-06, + "loss": 0.5411, + "step": 2802 + }, + { + "epoch": 0.11566394322026904, + "grad_norm": 10.21970954263593, + "learning_rate": 2.942686817709448e-06, + "loss": 0.5889, + "step": 2803 + }, + { + "epoch": 0.11570520755962697, + "grad_norm": 3.975799126638472, + "learning_rate": 2.9426319177070197e-06, + "loss": 0.5821, + "step": 2804 + }, + { + "epoch": 0.1157464718989849, + "grad_norm": 5.3982125661673965, + "learning_rate": 2.942576991935577e-06, + "loss": 0.6161, + "step": 2805 + }, + { + "epoch": 0.11578773623834282, + "grad_norm": 4.240582423069029, + "learning_rate": 2.9425220403961002e-06, + "loss": 0.5747, + "step": 2806 + }, + { + "epoch": 0.11582900057770075, + "grad_norm": 14.055656861619026, + "learning_rate": 2.9424670630895715e-06, + "loss": 0.5526, + "step": 2807 + }, + { + "epoch": 0.11587026491705868, + "grad_norm": 3.598583201297834, + "learning_rate": 2.942412060016972e-06, + "loss": 0.5706, + "step": 2808 + }, + { + "epoch": 0.11591152925641661, + "grad_norm": 4.011237123087078, + "learning_rate": 2.9423570311792848e-06, + "loss": 0.589, + "step": 2809 + }, + { + "epoch": 0.11595279359577453, + "grad_norm": 2.949683863204426, + "learning_rate": 2.9423019765774932e-06, + "loss": 0.5845, + "step": 2810 + }, + { + "epoch": 0.11599405793513246, + "grad_norm": 20.047379743643447, + "learning_rate": 2.94224689621258e-06, + "loss": 0.6358, + "step": 2811 + }, + { + "epoch": 0.11603532227449039, + "grad_norm": 5.048305837508557, + "learning_rate": 2.942191790085529e-06, + "loss": 0.6015, + "step": 2812 + }, + { + "epoch": 0.11607658661384831, + "grad_norm": 2.8851955538510627, + "learning_rate": 2.9421366581973256e-06, + "loss": 0.5968, + "step": 2813 + }, + { + "epoch": 0.11611785095320624, + "grad_norm": 3.1634524837865774, + "learning_rate": 2.9420815005489527e-06, + "loss": 0.5876, + "step": 2814 + }, + { + "epoch": 0.11615911529256416, + "grad_norm": 11.640321527083437, + "learning_rate": 2.9420263171413978e-06, + "loss": 0.5615, + "step": 2815 + }, + { + "epoch": 0.11620037963192209, + "grad_norm": 4.702731194605271, + "learning_rate": 2.941971107975645e-06, + "loss": 0.5402, + "step": 2816 + }, + { + "epoch": 0.11624164397128002, + "grad_norm": 60.03464129353783, + "learning_rate": 2.941915873052681e-06, + "loss": 0.5745, + "step": 2817 + }, + { + "epoch": 0.11628290831063795, + "grad_norm": 5.02436477162361, + "learning_rate": 2.941860612373492e-06, + "loss": 0.5984, + "step": 2818 + }, + { + "epoch": 0.11632417264999588, + "grad_norm": 4.044666091044659, + "learning_rate": 2.9418053259390657e-06, + "loss": 0.5431, + "step": 2819 + }, + { + "epoch": 0.1163654369893538, + "grad_norm": 2.867341083625145, + "learning_rate": 2.9417500137503892e-06, + "loss": 0.5858, + "step": 2820 + }, + { + "epoch": 0.11640670132871173, + "grad_norm": 16.040679315249744, + "learning_rate": 2.941694675808451e-06, + "loss": 0.553, + "step": 2821 + }, + { + "epoch": 0.11644796566806966, + "grad_norm": 4.589355649443433, + "learning_rate": 2.9416393121142396e-06, + "loss": 0.6359, + "step": 2822 + }, + { + "epoch": 0.11648923000742759, + "grad_norm": 5.077864816820983, + "learning_rate": 2.9415839226687434e-06, + "loss": 0.6005, + "step": 2823 + }, + { + "epoch": 0.1165304943467855, + "grad_norm": 2.4228581070391146, + "learning_rate": 2.941528507472952e-06, + "loss": 0.6154, + "step": 2824 + }, + { + "epoch": 0.11657175868614343, + "grad_norm": 3.7752731714935384, + "learning_rate": 2.9414730665278553e-06, + "loss": 0.5486, + "step": 2825 + }, + { + 
"epoch": 0.11661302302550136, + "grad_norm": 6.825930181368617, + "learning_rate": 2.941417599834444e-06, + "loss": 0.5194, + "step": 2826 + }, + { + "epoch": 0.11665428736485929, + "grad_norm": 6.443793701109901, + "learning_rate": 2.941362107393708e-06, + "loss": 0.5712, + "step": 2827 + }, + { + "epoch": 0.11669555170421722, + "grad_norm": 3.7003784686409875, + "learning_rate": 2.9413065892066394e-06, + "loss": 0.5411, + "step": 2828 + }, + { + "epoch": 0.11673681604357514, + "grad_norm": 2.409224756535315, + "learning_rate": 2.941251045274229e-06, + "loss": 0.5817, + "step": 2829 + }, + { + "epoch": 0.11677808038293307, + "grad_norm": 4.1641138621421225, + "learning_rate": 2.94119547559747e-06, + "loss": 0.5169, + "step": 2830 + }, + { + "epoch": 0.116819344722291, + "grad_norm": 2.8651663180747025, + "learning_rate": 2.9411398801773542e-06, + "loss": 0.5656, + "step": 2831 + }, + { + "epoch": 0.11686060906164893, + "grad_norm": 4.339879860063301, + "learning_rate": 2.941084259014875e-06, + "loss": 0.5665, + "step": 2832 + }, + { + "epoch": 0.11690187340100686, + "grad_norm": 3.1689439118347607, + "learning_rate": 2.941028612111026e-06, + "loss": 0.5675, + "step": 2833 + }, + { + "epoch": 0.11694313774036477, + "grad_norm": 4.441710386486827, + "learning_rate": 2.940972939466801e-06, + "loss": 0.5791, + "step": 2834 + }, + { + "epoch": 0.1169844020797227, + "grad_norm": 3.001090841397053, + "learning_rate": 2.9409172410831946e-06, + "loss": 0.5753, + "step": 2835 + }, + { + "epoch": 0.11702566641908063, + "grad_norm": 4.19310875022227, + "learning_rate": 2.940861516961202e-06, + "loss": 0.5982, + "step": 2836 + }, + { + "epoch": 0.11706693075843856, + "grad_norm": 4.355861724801081, + "learning_rate": 2.9408057671018173e-06, + "loss": 0.5664, + "step": 2837 + }, + { + "epoch": 0.11710819509779648, + "grad_norm": 4.530103129828478, + "learning_rate": 2.940749991506038e-06, + "loss": 0.5746, + "step": 2838 + }, + { + "epoch": 0.11714945943715441, + "grad_norm": 3.3996796958504834, + "learning_rate": 2.9406941901748597e-06, + "loss": 0.5827, + "step": 2839 + }, + { + "epoch": 0.11719072377651234, + "grad_norm": 4.026032857638392, + "learning_rate": 2.9406383631092785e-06, + "loss": 0.632, + "step": 2840 + }, + { + "epoch": 0.11723198811587027, + "grad_norm": 3.182943386051508, + "learning_rate": 2.940582510310293e-06, + "loss": 0.5719, + "step": 2841 + }, + { + "epoch": 0.1172732524552282, + "grad_norm": 2.222747946287702, + "learning_rate": 2.9405266317788995e-06, + "loss": 0.522, + "step": 2842 + }, + { + "epoch": 0.11731451679458611, + "grad_norm": 4.454475082912154, + "learning_rate": 2.9404707275160966e-06, + "loss": 0.5942, + "step": 2843 + }, + { + "epoch": 0.11735578113394404, + "grad_norm": 4.4152635671975275, + "learning_rate": 2.940414797522883e-06, + "loss": 0.5787, + "step": 2844 + }, + { + "epoch": 0.11739704547330197, + "grad_norm": 3.0007211100603177, + "learning_rate": 2.9403588418002584e-06, + "loss": 0.5369, + "step": 2845 + }, + { + "epoch": 0.1174383098126599, + "grad_norm": 5.963188608323932, + "learning_rate": 2.9403028603492213e-06, + "loss": 0.5854, + "step": 2846 + }, + { + "epoch": 0.11747957415201783, + "grad_norm": 4.348529881816506, + "learning_rate": 2.940246853170772e-06, + "loss": 0.5432, + "step": 2847 + }, + { + "epoch": 0.11752083849137575, + "grad_norm": 4.499362031298466, + "learning_rate": 2.9401908202659103e-06, + "loss": 0.5558, + "step": 2848 + }, + { + "epoch": 0.11756210283073368, + "grad_norm": 4.486789388869214, + "learning_rate": 
2.9401347616356387e-06, + "loss": 0.6124, + "step": 2849 + }, + { + "epoch": 0.11760336717009161, + "grad_norm": 5.763837058009762, + "learning_rate": 2.9400786772809575e-06, + "loss": 0.6157, + "step": 2850 + }, + { + "epoch": 0.11764463150944954, + "grad_norm": 3.051269574937235, + "learning_rate": 2.9400225672028682e-06, + "loss": 0.5753, + "step": 2851 + }, + { + "epoch": 0.11768589584880747, + "grad_norm": 5.7585562577756475, + "learning_rate": 2.939966431402374e-06, + "loss": 0.6284, + "step": 2852 + }, + { + "epoch": 0.11772716018816538, + "grad_norm": 64.88469608136624, + "learning_rate": 2.9399102698804762e-06, + "loss": 0.626, + "step": 2853 + }, + { + "epoch": 0.11776842452752331, + "grad_norm": 3.9873979536398405, + "learning_rate": 2.9398540826381797e-06, + "loss": 0.6139, + "step": 2854 + }, + { + "epoch": 0.11780968886688124, + "grad_norm": 2.5626125161971656, + "learning_rate": 2.939797869676487e-06, + "loss": 0.5626, + "step": 2855 + }, + { + "epoch": 0.11785095320623917, + "grad_norm": 3.0889346962022652, + "learning_rate": 2.939741630996402e-06, + "loss": 0.533, + "step": 2856 + }, + { + "epoch": 0.11789221754559709, + "grad_norm": 3.373653959668929, + "learning_rate": 2.9396853665989307e-06, + "loss": 0.5466, + "step": 2857 + }, + { + "epoch": 0.11793348188495502, + "grad_norm": 5.074510231789693, + "learning_rate": 2.939629076485077e-06, + "loss": 0.6272, + "step": 2858 + }, + { + "epoch": 0.11797474622431295, + "grad_norm": 5.987477832315626, + "learning_rate": 2.9395727606558467e-06, + "loss": 0.6232, + "step": 2859 + }, + { + "epoch": 0.11801601056367088, + "grad_norm": 2.436191536371953, + "learning_rate": 2.9395164191122453e-06, + "loss": 0.5689, + "step": 2860 + }, + { + "epoch": 0.11805727490302881, + "grad_norm": 3.643920080623685, + "learning_rate": 2.9394600518552792e-06, + "loss": 0.6073, + "step": 2861 + }, + { + "epoch": 0.11809853924238672, + "grad_norm": 3.891101962177491, + "learning_rate": 2.9394036588859563e-06, + "loss": 0.5378, + "step": 2862 + }, + { + "epoch": 0.11813980358174465, + "grad_norm": 4.2556388480976866, + "learning_rate": 2.939347240205283e-06, + "loss": 0.5786, + "step": 2863 + }, + { + "epoch": 0.11818106792110258, + "grad_norm": 9.040045543053449, + "learning_rate": 2.9392907958142673e-06, + "loss": 0.6277, + "step": 2864 + }, + { + "epoch": 0.11822233226046051, + "grad_norm": 5.7955508693103175, + "learning_rate": 2.9392343257139175e-06, + "loss": 0.6338, + "step": 2865 + }, + { + "epoch": 0.11826359659981844, + "grad_norm": 4.037352007566816, + "learning_rate": 2.9391778299052416e-06, + "loss": 0.5338, + "step": 2866 + }, + { + "epoch": 0.11830486093917636, + "grad_norm": 2.617782509813739, + "learning_rate": 2.93912130838925e-06, + "loss": 0.5695, + "step": 2867 + }, + { + "epoch": 0.11834612527853429, + "grad_norm": 5.248331191910854, + "learning_rate": 2.9390647611669514e-06, + "loss": 0.5679, + "step": 2868 + }, + { + "epoch": 0.11838738961789222, + "grad_norm": 8.949664110960207, + "learning_rate": 2.939008188239357e-06, + "loss": 0.6003, + "step": 2869 + }, + { + "epoch": 0.11842865395725015, + "grad_norm": 7.367152021448665, + "learning_rate": 2.9389515896074753e-06, + "loss": 0.5293, + "step": 2870 + }, + { + "epoch": 0.11846991829660806, + "grad_norm": 2.409497889551543, + "learning_rate": 2.938894965272319e-06, + "loss": 0.5747, + "step": 2871 + }, + { + "epoch": 0.118511182635966, + "grad_norm": 4.049146954464527, + "learning_rate": 2.9388383152348995e-06, + "loss": 0.5878, + "step": 2872 + }, + { + "epoch": 
0.11855244697532392, + "grad_norm": 3.0680369966821313, + "learning_rate": 2.9387816394962276e-06, + "loss": 0.5555, + "step": 2873 + }, + { + "epoch": 0.11859371131468185, + "grad_norm": 2.6057149776652624, + "learning_rate": 2.9387249380573166e-06, + "loss": 0.5431, + "step": 2874 + }, + { + "epoch": 0.11863497565403978, + "grad_norm": 11.870511366440283, + "learning_rate": 2.938668210919179e-06, + "loss": 0.6455, + "step": 2875 + }, + { + "epoch": 0.1186762399933977, + "grad_norm": 4.83567972997782, + "learning_rate": 2.9386114580828284e-06, + "loss": 0.5672, + "step": 2876 + }, + { + "epoch": 0.11871750433275563, + "grad_norm": 4.0591898335701995, + "learning_rate": 2.938554679549278e-06, + "loss": 0.5767, + "step": 2877 + }, + { + "epoch": 0.11875876867211356, + "grad_norm": 5.474934663067418, + "learning_rate": 2.9384978753195427e-06, + "loss": 0.5575, + "step": 2878 + }, + { + "epoch": 0.11880003301147149, + "grad_norm": 4.160001718968006, + "learning_rate": 2.9384410453946367e-06, + "loss": 0.5481, + "step": 2879 + }, + { + "epoch": 0.11884129735082942, + "grad_norm": 3.8774937824777047, + "learning_rate": 2.938384189775575e-06, + "loss": 0.5998, + "step": 2880 + }, + { + "epoch": 0.11888256169018734, + "grad_norm": 2.4944303449083978, + "learning_rate": 2.9383273084633735e-06, + "loss": 0.5744, + "step": 2881 + }, + { + "epoch": 0.11892382602954527, + "grad_norm": 6.874712967928458, + "learning_rate": 2.9382704014590482e-06, + "loss": 0.566, + "step": 2882 + }, + { + "epoch": 0.1189650903689032, + "grad_norm": 11.44061205894692, + "learning_rate": 2.9382134687636153e-06, + "loss": 0.5882, + "step": 2883 + }, + { + "epoch": 0.11900635470826113, + "grad_norm": 10.516128493279272, + "learning_rate": 2.938156510378092e-06, + "loss": 0.6155, + "step": 2884 + }, + { + "epoch": 0.11904761904761904, + "grad_norm": 16.29729777562391, + "learning_rate": 2.9380995263034954e-06, + "loss": 0.5824, + "step": 2885 + }, + { + "epoch": 0.11908888338697697, + "grad_norm": 12.540679973831494, + "learning_rate": 2.9380425165408442e-06, + "loss": 0.6, + "step": 2886 + }, + { + "epoch": 0.1191301477263349, + "grad_norm": 4.401493949670616, + "learning_rate": 2.9379854810911562e-06, + "loss": 0.6166, + "step": 2887 + }, + { + "epoch": 0.11917141206569283, + "grad_norm": 3.229741717185342, + "learning_rate": 2.93792841995545e-06, + "loss": 0.5822, + "step": 2888 + }, + { + "epoch": 0.11921267640505076, + "grad_norm": 3.2622079788906393, + "learning_rate": 2.9378713331347455e-06, + "loss": 0.5938, + "step": 2889 + }, + { + "epoch": 0.11925394074440868, + "grad_norm": 3.7462849202888373, + "learning_rate": 2.9378142206300616e-06, + "loss": 0.4959, + "step": 2890 + }, + { + "epoch": 0.1192952050837666, + "grad_norm": 2.9148952829897734, + "learning_rate": 2.937757082442419e-06, + "loss": 0.5504, + "step": 2891 + }, + { + "epoch": 0.11933646942312454, + "grad_norm": 3.647682685308856, + "learning_rate": 2.937699918572838e-06, + "loss": 0.5538, + "step": 2892 + }, + { + "epoch": 0.11937773376248247, + "grad_norm": 5.200764841700442, + "learning_rate": 2.93764272902234e-06, + "loss": 0.6243, + "step": 2893 + }, + { + "epoch": 0.1194189981018404, + "grad_norm": 5.819525423070449, + "learning_rate": 2.9375855137919465e-06, + "loss": 0.6306, + "step": 2894 + }, + { + "epoch": 0.11946026244119831, + "grad_norm": 4.598130727655307, + "learning_rate": 2.9375282728826794e-06, + "loss": 0.6241, + "step": 2895 + }, + { + "epoch": 0.11950152678055624, + "grad_norm": 18.96180883224759, + "learning_rate": 
2.937471006295561e-06, + "loss": 0.55, + "step": 2896 + }, + { + "epoch": 0.11954279111991417, + "grad_norm": 3.138437717563164, + "learning_rate": 2.9374137140316147e-06, + "loss": 0.5717, + "step": 2897 + }, + { + "epoch": 0.1195840554592721, + "grad_norm": 6.356345089395851, + "learning_rate": 2.9373563960918635e-06, + "loss": 0.5844, + "step": 2898 + }, + { + "epoch": 0.11962531979863002, + "grad_norm": 3.130540617547414, + "learning_rate": 2.937299052477331e-06, + "loss": 0.5935, + "step": 2899 + }, + { + "epoch": 0.11966658413798795, + "grad_norm": 3.0518322292023536, + "learning_rate": 2.9372416831890422e-06, + "loss": 0.5437, + "step": 2900 + }, + { + "epoch": 0.11970784847734588, + "grad_norm": 5.053253815790647, + "learning_rate": 2.9371842882280215e-06, + "loss": 0.5578, + "step": 2901 + }, + { + "epoch": 0.1197491128167038, + "grad_norm": 3.2013254787610674, + "learning_rate": 2.9371268675952943e-06, + "loss": 0.6005, + "step": 2902 + }, + { + "epoch": 0.11979037715606174, + "grad_norm": 12.457768217910285, + "learning_rate": 2.937069421291886e-06, + "loss": 0.5765, + "step": 2903 + }, + { + "epoch": 0.11983164149541965, + "grad_norm": 2.705898155036653, + "learning_rate": 2.937011949318823e-06, + "loss": 0.5744, + "step": 2904 + }, + { + "epoch": 0.11987290583477758, + "grad_norm": 2.3773439639335714, + "learning_rate": 2.9369544516771313e-06, + "loss": 0.6007, + "step": 2905 + }, + { + "epoch": 0.11991417017413551, + "grad_norm": 13.693632176958593, + "learning_rate": 2.9368969283678385e-06, + "loss": 0.5896, + "step": 2906 + }, + { + "epoch": 0.11995543451349344, + "grad_norm": 8.502499860277288, + "learning_rate": 2.9368393793919723e-06, + "loss": 0.5435, + "step": 2907 + }, + { + "epoch": 0.11999669885285137, + "grad_norm": 3.5183542644962222, + "learning_rate": 2.9367818047505598e-06, + "loss": 0.5815, + "step": 2908 + }, + { + "epoch": 0.12003796319220929, + "grad_norm": 4.109948644284314, + "learning_rate": 2.9367242044446308e-06, + "loss": 0.6036, + "step": 2909 + }, + { + "epoch": 0.12007922753156722, + "grad_norm": 11.772858233611066, + "learning_rate": 2.936666578475213e-06, + "loss": 0.5841, + "step": 2910 + }, + { + "epoch": 0.12012049187092515, + "grad_norm": 3.1859165218174583, + "learning_rate": 2.9366089268433356e-06, + "loss": 0.5647, + "step": 2911 + }, + { + "epoch": 0.12016175621028308, + "grad_norm": 4.8736426594762134, + "learning_rate": 2.9365512495500294e-06, + "loss": 0.4968, + "step": 2912 + }, + { + "epoch": 0.12020302054964101, + "grad_norm": 3.9207396525686993, + "learning_rate": 2.936493546596324e-06, + "loss": 0.5245, + "step": 2913 + }, + { + "epoch": 0.12024428488899892, + "grad_norm": 6.657548848397861, + "learning_rate": 2.936435817983251e-06, + "loss": 0.6512, + "step": 2914 + }, + { + "epoch": 0.12028554922835685, + "grad_norm": 2.6342971048652446, + "learning_rate": 2.9363780637118402e-06, + "loss": 0.535, + "step": 2915 + }, + { + "epoch": 0.12032681356771478, + "grad_norm": 4.3030736699376995, + "learning_rate": 2.936320283783124e-06, + "loss": 0.5918, + "step": 2916 + }, + { + "epoch": 0.12036807790707271, + "grad_norm": 7.026381980113178, + "learning_rate": 2.9362624781981343e-06, + "loss": 0.6027, + "step": 2917 + }, + { + "epoch": 0.12040934224643063, + "grad_norm": 4.018633311313197, + "learning_rate": 2.936204646957904e-06, + "loss": 0.5343, + "step": 2918 + }, + { + "epoch": 0.12045060658578856, + "grad_norm": 3.1536666411494623, + "learning_rate": 2.9361467900634655e-06, + "loss": 0.5209, + "step": 2919 + }, + { + "epoch": 
0.12049187092514649, + "grad_norm": 2.6080375490722334, + "learning_rate": 2.936088907515853e-06, + "loss": 0.608, + "step": 2920 + }, + { + "epoch": 0.12053313526450442, + "grad_norm": 3.9094973442303473, + "learning_rate": 2.9360309993161002e-06, + "loss": 0.5882, + "step": 2921 + }, + { + "epoch": 0.12057439960386235, + "grad_norm": 3.929102219849618, + "learning_rate": 2.9359730654652413e-06, + "loss": 0.605, + "step": 2922 + }, + { + "epoch": 0.12061566394322026, + "grad_norm": 5.455104857669854, + "learning_rate": 2.9359151059643108e-06, + "loss": 0.6223, + "step": 2923 + }, + { + "epoch": 0.1206569282825782, + "grad_norm": 4.303096560333821, + "learning_rate": 2.9358571208143447e-06, + "loss": 0.6286, + "step": 2924 + }, + { + "epoch": 0.12069819262193612, + "grad_norm": 3.061929149483069, + "learning_rate": 2.9357991100163785e-06, + "loss": 0.5503, + "step": 2925 + }, + { + "epoch": 0.12073945696129405, + "grad_norm": 3.554932862967396, + "learning_rate": 2.935741073571448e-06, + "loss": 0.5373, + "step": 2926 + }, + { + "epoch": 0.12078072130065198, + "grad_norm": 34.61772293861639, + "learning_rate": 2.9356830114805904e-06, + "loss": 0.5923, + "step": 2927 + }, + { + "epoch": 0.1208219856400099, + "grad_norm": 4.215531325270724, + "learning_rate": 2.9356249237448433e-06, + "loss": 0.5474, + "step": 2928 + }, + { + "epoch": 0.12086324997936783, + "grad_norm": 3.948775789864377, + "learning_rate": 2.935566810365243e-06, + "loss": 0.6099, + "step": 2929 + }, + { + "epoch": 0.12090451431872576, + "grad_norm": 5.092848704728567, + "learning_rate": 2.9355086713428284e-06, + "loss": 0.5509, + "step": 2930 + }, + { + "epoch": 0.12094577865808369, + "grad_norm": 3.0665215323785127, + "learning_rate": 2.935450506678638e-06, + "loss": 0.5752, + "step": 2931 + }, + { + "epoch": 0.1209870429974416, + "grad_norm": 3.068507495242951, + "learning_rate": 2.9353923163737105e-06, + "loss": 0.5667, + "step": 2932 + }, + { + "epoch": 0.12102830733679953, + "grad_norm": 4.365459600749369, + "learning_rate": 2.935334100429085e-06, + "loss": 0.5154, + "step": 2933 + }, + { + "epoch": 0.12106957167615746, + "grad_norm": 9.280502265190044, + "learning_rate": 2.935275858845802e-06, + "loss": 0.5494, + "step": 2934 + }, + { + "epoch": 0.1211108360155154, + "grad_norm": 5.633826381296252, + "learning_rate": 2.935217591624902e-06, + "loss": 0.5621, + "step": 2935 + }, + { + "epoch": 0.12115210035487332, + "grad_norm": 4.911751973810842, + "learning_rate": 2.9351592987674256e-06, + "loss": 0.5732, + "step": 2936 + }, + { + "epoch": 0.12119336469423124, + "grad_norm": 3.15563422822418, + "learning_rate": 2.9351009802744135e-06, + "loss": 0.5129, + "step": 2937 + }, + { + "epoch": 0.12123462903358917, + "grad_norm": 2.5642989630277553, + "learning_rate": 2.935042636146908e-06, + "loss": 0.5524, + "step": 2938 + }, + { + "epoch": 0.1212758933729471, + "grad_norm": 5.0894626049917235, + "learning_rate": 2.934984266385951e-06, + "loss": 0.5546, + "step": 2939 + }, + { + "epoch": 0.12131715771230503, + "grad_norm": 4.999026007918202, + "learning_rate": 2.934925870992585e-06, + "loss": 0.6028, + "step": 2940 + }, + { + "epoch": 0.12135842205166296, + "grad_norm": 3.858100333804824, + "learning_rate": 2.9348674499678538e-06, + "loss": 0.5239, + "step": 2941 + }, + { + "epoch": 0.12139968639102088, + "grad_norm": 3.112914216323295, + "learning_rate": 2.9348090033128e-06, + "loss": 0.5622, + "step": 2942 + }, + { + "epoch": 0.1214409507303788, + "grad_norm": 2.801502718242418, + "learning_rate": 2.934750531028468e-06, + 
"loss": 0.5579, + "step": 2943 + }, + { + "epoch": 0.12148221506973673, + "grad_norm": 4.267298129560708, + "learning_rate": 2.9346920331159027e-06, + "loss": 0.5367, + "step": 2944 + }, + { + "epoch": 0.12152347940909466, + "grad_norm": 3.2009220108083434, + "learning_rate": 2.9346335095761486e-06, + "loss": 0.5484, + "step": 2945 + }, + { + "epoch": 0.12156474374845258, + "grad_norm": 3.868176812799374, + "learning_rate": 2.934574960410251e-06, + "loss": 0.5663, + "step": 2946 + }, + { + "epoch": 0.12160600808781051, + "grad_norm": 5.7297460461593115, + "learning_rate": 2.934516385619256e-06, + "loss": 0.5408, + "step": 2947 + }, + { + "epoch": 0.12164727242716844, + "grad_norm": 4.173387299573796, + "learning_rate": 2.9344577852042096e-06, + "loss": 0.5603, + "step": 2948 + }, + { + "epoch": 0.12168853676652637, + "grad_norm": 7.127089389454174, + "learning_rate": 2.934399159166159e-06, + "loss": 0.5664, + "step": 2949 + }, + { + "epoch": 0.1217298011058843, + "grad_norm": 4.466214310747325, + "learning_rate": 2.9343405075061506e-06, + "loss": 0.6349, + "step": 2950 + }, + { + "epoch": 0.12177106544524222, + "grad_norm": 6.78526448929072, + "learning_rate": 2.9342818302252324e-06, + "loss": 0.6086, + "step": 2951 + }, + { + "epoch": 0.12181232978460015, + "grad_norm": 3.1094816190326964, + "learning_rate": 2.9342231273244534e-06, + "loss": 0.5354, + "step": 2952 + }, + { + "epoch": 0.12185359412395808, + "grad_norm": 5.245344367736944, + "learning_rate": 2.9341643988048613e-06, + "loss": 0.6365, + "step": 2953 + }, + { + "epoch": 0.121894858463316, + "grad_norm": 4.007341589423184, + "learning_rate": 2.934105644667505e-06, + "loss": 0.5041, + "step": 2954 + }, + { + "epoch": 0.12193612280267394, + "grad_norm": 3.8648130697888385, + "learning_rate": 2.9340468649134348e-06, + "loss": 0.5688, + "step": 2955 + }, + { + "epoch": 0.12197738714203185, + "grad_norm": 6.032894562755864, + "learning_rate": 2.9339880595436996e-06, + "loss": 0.5479, + "step": 2956 + }, + { + "epoch": 0.12201865148138978, + "grad_norm": 15.921688298117454, + "learning_rate": 2.9339292285593506e-06, + "loss": 0.5736, + "step": 2957 + }, + { + "epoch": 0.12205991582074771, + "grad_norm": 4.613931516600065, + "learning_rate": 2.933870371961438e-06, + "loss": 0.5905, + "step": 2958 + }, + { + "epoch": 0.12210118016010564, + "grad_norm": 5.182212924254643, + "learning_rate": 2.9338114897510144e-06, + "loss": 0.6283, + "step": 2959 + }, + { + "epoch": 0.12214244449946357, + "grad_norm": 5.949662734224178, + "learning_rate": 2.9337525819291304e-06, + "loss": 0.6281, + "step": 2960 + }, + { + "epoch": 0.12218370883882149, + "grad_norm": 4.512961098811582, + "learning_rate": 2.9336936484968384e-06, + "loss": 0.5634, + "step": 2961 + }, + { + "epoch": 0.12222497317817942, + "grad_norm": 2.5236600877037394, + "learning_rate": 2.933634689455191e-06, + "loss": 0.5822, + "step": 2962 + }, + { + "epoch": 0.12226623751753735, + "grad_norm": 3.237499811596175, + "learning_rate": 2.933575704805242e-06, + "loss": 0.5488, + "step": 2963 + }, + { + "epoch": 0.12230750185689528, + "grad_norm": 3.347392677893197, + "learning_rate": 2.933516694548045e-06, + "loss": 0.6109, + "step": 2964 + }, + { + "epoch": 0.12234876619625319, + "grad_norm": 4.673897266770675, + "learning_rate": 2.9334576586846536e-06, + "loss": 0.5772, + "step": 2965 + }, + { + "epoch": 0.12239003053561112, + "grad_norm": 4.927561264940949, + "learning_rate": 2.933398597216122e-06, + "loss": 0.563, + "step": 2966 + }, + { + "epoch": 0.12243129487496905, + "grad_norm": 
3.3498445546478535, + "learning_rate": 2.933339510143506e-06, + "loss": 0.5844, + "step": 2967 + }, + { + "epoch": 0.12247255921432698, + "grad_norm": 5.4002967540418005, + "learning_rate": 2.9332803974678606e-06, + "loss": 0.6004, + "step": 2968 + }, + { + "epoch": 0.12251382355368491, + "grad_norm": 6.033842790989094, + "learning_rate": 2.9332212591902418e-06, + "loss": 0.6007, + "step": 2969 + }, + { + "epoch": 0.12255508789304283, + "grad_norm": 4.686632897142678, + "learning_rate": 2.933162095311706e-06, + "loss": 0.578, + "step": 2970 + }, + { + "epoch": 0.12259635223240076, + "grad_norm": 12.379313532924456, + "learning_rate": 2.93310290583331e-06, + "loss": 0.5553, + "step": 2971 + }, + { + "epoch": 0.12263761657175869, + "grad_norm": 3.8210054860150184, + "learning_rate": 2.9330436907561103e-06, + "loss": 0.577, + "step": 2972 + }, + { + "epoch": 0.12267888091111662, + "grad_norm": 3.344106352798886, + "learning_rate": 2.9329844500811663e-06, + "loss": 0.5691, + "step": 2973 + }, + { + "epoch": 0.12272014525047455, + "grad_norm": 9.50225274772944, + "learning_rate": 2.932925183809535e-06, + "loss": 0.567, + "step": 2974 + }, + { + "epoch": 0.12276140958983246, + "grad_norm": 4.483948472983047, + "learning_rate": 2.9328658919422754e-06, + "loss": 0.6158, + "step": 2975 + }, + { + "epoch": 0.12280267392919039, + "grad_norm": 4.603238146285378, + "learning_rate": 2.932806574480446e-06, + "loss": 0.562, + "step": 2976 + }, + { + "epoch": 0.12284393826854832, + "grad_norm": 2.891428062853007, + "learning_rate": 2.932747231425107e-06, + "loss": 0.6029, + "step": 2977 + }, + { + "epoch": 0.12288520260790625, + "grad_norm": 7.5120587181261, + "learning_rate": 2.9326878627773184e-06, + "loss": 0.5775, + "step": 2978 + }, + { + "epoch": 0.12292646694726417, + "grad_norm": 5.986678570097135, + "learning_rate": 2.9326284685381404e-06, + "loss": 0.5924, + "step": 2979 + }, + { + "epoch": 0.1229677312866221, + "grad_norm": 3.921391252855099, + "learning_rate": 2.932569048708634e-06, + "loss": 0.5655, + "step": 2980 + }, + { + "epoch": 0.12300899562598003, + "grad_norm": 6.218378764651969, + "learning_rate": 2.932509603289861e-06, + "loss": 0.568, + "step": 2981 + }, + { + "epoch": 0.12305025996533796, + "grad_norm": 3.6962244128981294, + "learning_rate": 2.932450132282883e-06, + "loss": 0.61, + "step": 2982 + }, + { + "epoch": 0.12309152430469589, + "grad_norm": 5.090564449112397, + "learning_rate": 2.9323906356887615e-06, + "loss": 0.5912, + "step": 2983 + }, + { + "epoch": 0.1231327886440538, + "grad_norm": 11.238393622397664, + "learning_rate": 2.9323311135085607e-06, + "loss": 0.5385, + "step": 2984 + }, + { + "epoch": 0.12317405298341173, + "grad_norm": 2.849583193789995, + "learning_rate": 2.932271565743343e-06, + "loss": 0.5609, + "step": 2985 + }, + { + "epoch": 0.12321531732276966, + "grad_norm": 5.367538949311372, + "learning_rate": 2.9322119923941714e-06, + "loss": 0.5868, + "step": 2986 + }, + { + "epoch": 0.12325658166212759, + "grad_norm": 5.32740665768815, + "learning_rate": 2.9321523934621116e-06, + "loss": 0.5367, + "step": 2987 + }, + { + "epoch": 0.12329784600148552, + "grad_norm": 6.822722505901729, + "learning_rate": 2.932092768948227e-06, + "loss": 0.596, + "step": 2988 + }, + { + "epoch": 0.12333911034084344, + "grad_norm": 3.2449606033089338, + "learning_rate": 2.932033118853583e-06, + "loss": 0.5961, + "step": 2989 + }, + { + "epoch": 0.12338037468020137, + "grad_norm": 2.850835437779471, + "learning_rate": 2.9319734431792453e-06, + "loss": 0.5355, + "step": 2990 + }, + 
{ + "epoch": 0.1234216390195593, + "grad_norm": 9.430096096027157, + "learning_rate": 2.9319137419262793e-06, + "loss": 0.5774, + "step": 2991 + }, + { + "epoch": 0.12346290335891723, + "grad_norm": 3.971257504112778, + "learning_rate": 2.931854015095752e-06, + "loss": 0.6299, + "step": 2992 + }, + { + "epoch": 0.12350416769827514, + "grad_norm": 3.304523025048569, + "learning_rate": 2.93179426268873e-06, + "loss": 0.4835, + "step": 2993 + }, + { + "epoch": 0.12354543203763307, + "grad_norm": 5.289935125592148, + "learning_rate": 2.9317344847062806e-06, + "loss": 0.5798, + "step": 2994 + }, + { + "epoch": 0.123586696376991, + "grad_norm": 8.145572569538293, + "learning_rate": 2.9316746811494714e-06, + "loss": 0.5607, + "step": 2995 + }, + { + "epoch": 0.12362796071634893, + "grad_norm": 2.313599492976742, + "learning_rate": 2.9316148520193713e-06, + "loss": 0.5629, + "step": 2996 + }, + { + "epoch": 0.12366922505570686, + "grad_norm": 2.832986263197555, + "learning_rate": 2.931554997317049e-06, + "loss": 0.5699, + "step": 2997 + }, + { + "epoch": 0.12371048939506478, + "grad_norm": 3.534143549889822, + "learning_rate": 2.9314951170435724e-06, + "loss": 0.5478, + "step": 2998 + }, + { + "epoch": 0.12375175373442271, + "grad_norm": 3.749581435403968, + "learning_rate": 2.931435211200012e-06, + "loss": 0.5222, + "step": 2999 + }, + { + "epoch": 0.12379301807378064, + "grad_norm": 3.3866246820848813, + "learning_rate": 2.9313752797874385e-06, + "loss": 0.5339, + "step": 3000 + }, + { + "epoch": 0.12383428241313857, + "grad_norm": 2.964378084543894, + "learning_rate": 2.931315322806921e-06, + "loss": 0.5984, + "step": 3001 + }, + { + "epoch": 0.1238755467524965, + "grad_norm": 25.074358333578555, + "learning_rate": 2.9312553402595317e-06, + "loss": 0.5674, + "step": 3002 + }, + { + "epoch": 0.12391681109185441, + "grad_norm": 4.321244029215082, + "learning_rate": 2.9311953321463415e-06, + "loss": 0.5286, + "step": 3003 + }, + { + "epoch": 0.12395807543121234, + "grad_norm": 4.8377478768005, + "learning_rate": 2.9311352984684225e-06, + "loss": 0.5728, + "step": 3004 + }, + { + "epoch": 0.12399933977057027, + "grad_norm": 6.120285820977385, + "learning_rate": 2.9310752392268467e-06, + "loss": 0.52, + "step": 3005 + }, + { + "epoch": 0.1240406041099282, + "grad_norm": 3.3617743467604195, + "learning_rate": 2.931015154422687e-06, + "loss": 0.5655, + "step": 3006 + }, + { + "epoch": 0.12408186844928612, + "grad_norm": 4.946464077414407, + "learning_rate": 2.930955044057017e-06, + "loss": 0.5966, + "step": 3007 + }, + { + "epoch": 0.12412313278864405, + "grad_norm": 9.777358682904824, + "learning_rate": 2.93089490813091e-06, + "loss": 0.5862, + "step": 3008 + }, + { + "epoch": 0.12416439712800198, + "grad_norm": 6.1190653432675175, + "learning_rate": 2.930834746645441e-06, + "loss": 0.5915, + "step": 3009 + }, + { + "epoch": 0.12420566146735991, + "grad_norm": 51.90990065007412, + "learning_rate": 2.930774559601683e-06, + "loss": 0.563, + "step": 3010 + }, + { + "epoch": 0.12424692580671784, + "grad_norm": 5.038825928875009, + "learning_rate": 2.930714347000713e-06, + "loss": 0.5761, + "step": 3011 + }, + { + "epoch": 0.12428819014607576, + "grad_norm": 3.0899260557964516, + "learning_rate": 2.9306541088436056e-06, + "loss": 0.5361, + "step": 3012 + }, + { + "epoch": 0.12432945448543369, + "grad_norm": 17.362509086729585, + "learning_rate": 2.930593845131436e-06, + "loss": 0.6072, + "step": 3013 + }, + { + "epoch": 0.12437071882479162, + "grad_norm": 4.365593485066175, + "learning_rate": 
2.9305335558652827e-06, + "loss": 0.5813, + "step": 3014 + }, + { + "epoch": 0.12441198316414954, + "grad_norm": 3.797622420544645, + "learning_rate": 2.930473241046221e-06, + "loss": 0.5817, + "step": 3015 + }, + { + "epoch": 0.12445324750350747, + "grad_norm": 4.932580465177176, + "learning_rate": 2.9304129006753285e-06, + "loss": 0.6171, + "step": 3016 + }, + { + "epoch": 0.12449451184286539, + "grad_norm": 3.9137115051138474, + "learning_rate": 2.9303525347536835e-06, + "loss": 0.5395, + "step": 3017 + }, + { + "epoch": 0.12453577618222332, + "grad_norm": 2.834708501438458, + "learning_rate": 2.930292143282364e-06, + "loss": 0.5393, + "step": 3018 + }, + { + "epoch": 0.12457704052158125, + "grad_norm": 8.828959936538194, + "learning_rate": 2.930231726262448e-06, + "loss": 0.5909, + "step": 3019 + }, + { + "epoch": 0.12461830486093918, + "grad_norm": 4.0651835864081, + "learning_rate": 2.9301712836950164e-06, + "loss": 0.6231, + "step": 3020 + }, + { + "epoch": 0.12465956920029711, + "grad_norm": 3.7098534098016644, + "learning_rate": 2.9301108155811477e-06, + "loss": 0.6006, + "step": 3021 + }, + { + "epoch": 0.12470083353965503, + "grad_norm": 3.9428553235115933, + "learning_rate": 2.9300503219219227e-06, + "loss": 0.606, + "step": 3022 + }, + { + "epoch": 0.12474209787901296, + "grad_norm": 3.1409503773862024, + "learning_rate": 2.9299898027184207e-06, + "loss": 0.556, + "step": 3023 + }, + { + "epoch": 0.12478336221837089, + "grad_norm": 5.669417250118151, + "learning_rate": 2.9299292579717236e-06, + "loss": 0.5928, + "step": 3024 + }, + { + "epoch": 0.12482462655772882, + "grad_norm": 2.7884639563064666, + "learning_rate": 2.9298686876829137e-06, + "loss": 0.5775, + "step": 3025 + }, + { + "epoch": 0.12486589089708673, + "grad_norm": 4.3927500547303655, + "learning_rate": 2.9298080918530716e-06, + "loss": 0.5633, + "step": 3026 + }, + { + "epoch": 0.12490715523644466, + "grad_norm": 3.4053789399295677, + "learning_rate": 2.92974747048328e-06, + "loss": 0.6134, + "step": 3027 + }, + { + "epoch": 0.12494841957580259, + "grad_norm": 3.9818215697054966, + "learning_rate": 2.929686823574622e-06, + "loss": 0.5771, + "step": 3028 + }, + { + "epoch": 0.12498968391516052, + "grad_norm": 2.3932937435682446, + "learning_rate": 2.92962615112818e-06, + "loss": 0.5711, + "step": 3029 + }, + { + "epoch": 0.12503094825451844, + "grad_norm": 4.5194463233095465, + "learning_rate": 2.92956545314504e-06, + "loss": 0.5915, + "step": 3030 + }, + { + "epoch": 0.12507221259387638, + "grad_norm": 14.709261054256743, + "learning_rate": 2.9295047296262836e-06, + "loss": 0.59, + "step": 3031 + }, + { + "epoch": 0.1251134769332343, + "grad_norm": 6.143905404877758, + "learning_rate": 2.929443980572997e-06, + "loss": 0.6311, + "step": 3032 + }, + { + "epoch": 0.1251547412725922, + "grad_norm": 6.1396377991578195, + "learning_rate": 2.9293832059862655e-06, + "loss": 0.5935, + "step": 3033 + }, + { + "epoch": 0.12519600561195016, + "grad_norm": 5.764673023920491, + "learning_rate": 2.9293224058671736e-06, + "loss": 0.579, + "step": 3034 + }, + { + "epoch": 0.12523726995130807, + "grad_norm": 10.855180457871702, + "learning_rate": 2.9292615802168083e-06, + "loss": 0.618, + "step": 3035 + }, + { + "epoch": 0.12527853429066602, + "grad_norm": 3.0000511983513696, + "learning_rate": 2.9292007290362554e-06, + "loss": 0.59, + "step": 3036 + }, + { + "epoch": 0.12531979863002393, + "grad_norm": 3.4280417459033248, + "learning_rate": 2.929139852326602e-06, + "loss": 0.6363, + "step": 3037 + }, + { + "epoch": 
0.12536106296938185, + "grad_norm": 5.240401285270147, + "learning_rate": 2.9290789500889365e-06, + "loss": 0.5563, + "step": 3038 + }, + { + "epoch": 0.1254023273087398, + "grad_norm": 3.039283741117371, + "learning_rate": 2.9290180223243448e-06, + "loss": 0.5903, + "step": 3039 + }, + { + "epoch": 0.1254435916480977, + "grad_norm": 3.2195751251272213, + "learning_rate": 2.928957069033917e-06, + "loss": 0.6213, + "step": 3040 + }, + { + "epoch": 0.12548485598745565, + "grad_norm": 5.303775251368104, + "learning_rate": 2.928896090218741e-06, + "loss": 0.5297, + "step": 3041 + }, + { + "epoch": 0.12552612032681357, + "grad_norm": 3.1215174731474846, + "learning_rate": 2.9288350858799066e-06, + "loss": 0.5948, + "step": 3042 + }, + { + "epoch": 0.12556738466617148, + "grad_norm": 2.9009741642269606, + "learning_rate": 2.9287740560185036e-06, + "loss": 0.5496, + "step": 3043 + }, + { + "epoch": 0.12560864900552943, + "grad_norm": 2.2502069170414867, + "learning_rate": 2.9287130006356205e-06, + "loss": 0.5187, + "step": 3044 + }, + { + "epoch": 0.12564991334488734, + "grad_norm": 4.64623825174457, + "learning_rate": 2.92865191973235e-06, + "loss": 0.5635, + "step": 3045 + }, + { + "epoch": 0.1256911776842453, + "grad_norm": 4.71801732811493, + "learning_rate": 2.9285908133097822e-06, + "loss": 0.5782, + "step": 3046 + }, + { + "epoch": 0.1257324420236032, + "grad_norm": 5.625913091441609, + "learning_rate": 2.9285296813690086e-06, + "loss": 0.6396, + "step": 3047 + }, + { + "epoch": 0.12577370636296112, + "grad_norm": 4.221782322240253, + "learning_rate": 2.9284685239111214e-06, + "loss": 0.5992, + "step": 3048 + }, + { + "epoch": 0.12581497070231906, + "grad_norm": 5.168044417992103, + "learning_rate": 2.9284073409372125e-06, + "loss": 0.5681, + "step": 3049 + }, + { + "epoch": 0.12585623504167698, + "grad_norm": 3.1422613139372872, + "learning_rate": 2.9283461324483752e-06, + "loss": 0.5754, + "step": 3050 + }, + { + "epoch": 0.12589749938103492, + "grad_norm": 2.5865847534713198, + "learning_rate": 2.928284898445703e-06, + "loss": 0.599, + "step": 3051 + }, + { + "epoch": 0.12593876372039284, + "grad_norm": 36.28827402233453, + "learning_rate": 2.9282236389302893e-06, + "loss": 0.5233, + "step": 3052 + }, + { + "epoch": 0.12598002805975075, + "grad_norm": 3.216621414567338, + "learning_rate": 2.928162353903229e-06, + "loss": 0.5577, + "step": 3053 + }, + { + "epoch": 0.1260212923991087, + "grad_norm": 4.981771190562928, + "learning_rate": 2.928101043365616e-06, + "loss": 0.5605, + "step": 3054 + }, + { + "epoch": 0.1260625567384666, + "grad_norm": 5.8493832909607555, + "learning_rate": 2.9280397073185457e-06, + "loss": 0.6483, + "step": 3055 + }, + { + "epoch": 0.12610382107782456, + "grad_norm": 5.400803893526846, + "learning_rate": 2.927978345763114e-06, + "loss": 0.6102, + "step": 3056 + }, + { + "epoch": 0.12614508541718247, + "grad_norm": 5.843484224330757, + "learning_rate": 2.9279169587004165e-06, + "loss": 0.5266, + "step": 3057 + }, + { + "epoch": 0.1261863497565404, + "grad_norm": 8.08757104494184, + "learning_rate": 2.9278555461315504e-06, + "loss": 0.5186, + "step": 3058 + }, + { + "epoch": 0.12622761409589833, + "grad_norm": 5.046137452366864, + "learning_rate": 2.927794108057612e-06, + "loss": 0.5487, + "step": 3059 + }, + { + "epoch": 0.12626887843525625, + "grad_norm": 8.626164299836182, + "learning_rate": 2.9277326444796995e-06, + "loss": 0.5591, + "step": 3060 + }, + { + "epoch": 0.12631014277461416, + "grad_norm": 3.870960066152709, + "learning_rate": 
2.9276711553989098e-06, + "loss": 0.6344, + "step": 3061 + }, + { + "epoch": 0.1263514071139721, + "grad_norm": 4.627201427820283, + "learning_rate": 2.927609640816342e-06, + "loss": 0.6083, + "step": 3062 + }, + { + "epoch": 0.12639267145333002, + "grad_norm": 3.7967963640191136, + "learning_rate": 2.9275481007330947e-06, + "loss": 0.5655, + "step": 3063 + }, + { + "epoch": 0.12643393579268797, + "grad_norm": 2.4063482736724033, + "learning_rate": 2.927486535150267e-06, + "loss": 0.5279, + "step": 3064 + }, + { + "epoch": 0.12647520013204588, + "grad_norm": 3.938758764615801, + "learning_rate": 2.927424944068959e-06, + "loss": 0.5756, + "step": 3065 + }, + { + "epoch": 0.1265164644714038, + "grad_norm": 3.8285665418645904, + "learning_rate": 2.9273633274902705e-06, + "loss": 0.5629, + "step": 3066 + }, + { + "epoch": 0.12655772881076174, + "grad_norm": 3.983182933704335, + "learning_rate": 2.927301685415302e-06, + "loss": 0.575, + "step": 3067 + }, + { + "epoch": 0.12659899315011966, + "grad_norm": 2.9005579456032815, + "learning_rate": 2.927240017845155e-06, + "loss": 0.5791, + "step": 3068 + }, + { + "epoch": 0.1266402574894776, + "grad_norm": 3.1970242910003583, + "learning_rate": 2.927178324780931e-06, + "loss": 0.6049, + "step": 3069 + }, + { + "epoch": 0.12668152182883552, + "grad_norm": 4.140216021048778, + "learning_rate": 2.927116606223732e-06, + "loss": 0.489, + "step": 3070 + }, + { + "epoch": 0.12672278616819344, + "grad_norm": 6.884057387356059, + "learning_rate": 2.92705486217466e-06, + "loss": 0.5662, + "step": 3071 + }, + { + "epoch": 0.12676405050755138, + "grad_norm": 2.557506062700316, + "learning_rate": 2.926993092634818e-06, + "loss": 0.5669, + "step": 3072 + }, + { + "epoch": 0.1268053148469093, + "grad_norm": 7.641507197848172, + "learning_rate": 2.9269312976053107e-06, + "loss": 0.5894, + "step": 3073 + }, + { + "epoch": 0.12684657918626724, + "grad_norm": 3.0775899282566628, + "learning_rate": 2.9268694770872397e-06, + "loss": 0.5184, + "step": 3074 + }, + { + "epoch": 0.12688784352562515, + "grad_norm": 3.7314256492723468, + "learning_rate": 2.9268076310817106e-06, + "loss": 0.5802, + "step": 3075 + }, + { + "epoch": 0.12692910786498307, + "grad_norm": 5.171285126690074, + "learning_rate": 2.926745759589828e-06, + "loss": 0.6269, + "step": 3076 + }, + { + "epoch": 0.12697037220434101, + "grad_norm": 5.556884918633443, + "learning_rate": 2.926683862612697e-06, + "loss": 0.6153, + "step": 3077 + }, + { + "epoch": 0.12701163654369893, + "grad_norm": 5.83924273995614, + "learning_rate": 2.9266219401514233e-06, + "loss": 0.6127, + "step": 3078 + }, + { + "epoch": 0.12705290088305687, + "grad_norm": 3.986524306231177, + "learning_rate": 2.9265599922071125e-06, + "loss": 0.5244, + "step": 3079 + }, + { + "epoch": 0.1270941652224148, + "grad_norm": 15.474632244618913, + "learning_rate": 2.926498018780872e-06, + "loss": 0.5941, + "step": 3080 + }, + { + "epoch": 0.1271354295617727, + "grad_norm": 8.115428698145195, + "learning_rate": 2.926436019873808e-06, + "loss": 0.5754, + "step": 3081 + }, + { + "epoch": 0.12717669390113065, + "grad_norm": 8.170788361651935, + "learning_rate": 2.9263739954870285e-06, + "loss": 0.5617, + "step": 3082 + }, + { + "epoch": 0.12721795824048857, + "grad_norm": 4.258050210658809, + "learning_rate": 2.9263119456216407e-06, + "loss": 0.6172, + "step": 3083 + }, + { + "epoch": 0.1272592225798465, + "grad_norm": 5.213649631918372, + "learning_rate": 2.926249870278754e-06, + "loss": 0.5803, + "step": 3084 + }, + { + "epoch": 0.12730048691920443, 
+ "grad_norm": 21.242311950255907, + "learning_rate": 2.9261877694594764e-06, + "loss": 0.5544, + "step": 3085 + }, + { + "epoch": 0.12734175125856234, + "grad_norm": 10.671179035330875, + "learning_rate": 2.9261256431649175e-06, + "loss": 0.5632, + "step": 3086 + }, + { + "epoch": 0.12738301559792028, + "grad_norm": 3.345416938002851, + "learning_rate": 2.9260634913961874e-06, + "loss": 0.5529, + "step": 3087 + }, + { + "epoch": 0.1274242799372782, + "grad_norm": 4.067585232997685, + "learning_rate": 2.9260013141543954e-06, + "loss": 0.5566, + "step": 3088 + }, + { + "epoch": 0.12746554427663614, + "grad_norm": 2.308595334008086, + "learning_rate": 2.925939111440653e-06, + "loss": 0.5295, + "step": 3089 + }, + { + "epoch": 0.12750680861599406, + "grad_norm": 3.3774223388154723, + "learning_rate": 2.9258768832560707e-06, + "loss": 0.553, + "step": 3090 + }, + { + "epoch": 0.12754807295535198, + "grad_norm": 2.9487368042886923, + "learning_rate": 2.92581462960176e-06, + "loss": 0.5685, + "step": 3091 + }, + { + "epoch": 0.12758933729470992, + "grad_norm": 4.03814855849964, + "learning_rate": 2.9257523504788334e-06, + "loss": 0.5889, + "step": 3092 + }, + { + "epoch": 0.12763060163406784, + "grad_norm": 4.870252838905673, + "learning_rate": 2.9256900458884033e-06, + "loss": 0.5854, + "step": 3093 + }, + { + "epoch": 0.12767186597342575, + "grad_norm": 4.20222562180669, + "learning_rate": 2.925627715831582e-06, + "loss": 0.5676, + "step": 3094 + }, + { + "epoch": 0.1277131303127837, + "grad_norm": 3.571101191519321, + "learning_rate": 2.925565360309484e-06, + "loss": 0.577, + "step": 3095 + }, + { + "epoch": 0.1277543946521416, + "grad_norm": 4.1985661849956, + "learning_rate": 2.925502979323222e-06, + "loss": 0.6247, + "step": 3096 + }, + { + "epoch": 0.12779565899149956, + "grad_norm": 4.871875342774196, + "learning_rate": 2.9254405728739108e-06, + "loss": 0.5393, + "step": 3097 + }, + { + "epoch": 0.12783692333085747, + "grad_norm": 2.5566152751611892, + "learning_rate": 2.925378140962665e-06, + "loss": 0.6357, + "step": 3098 + }, + { + "epoch": 0.1278781876702154, + "grad_norm": 2.9147748790232435, + "learning_rate": 2.9253156835905997e-06, + "loss": 0.508, + "step": 3099 + }, + { + "epoch": 0.12791945200957333, + "grad_norm": 2.594221689490149, + "learning_rate": 2.9252532007588312e-06, + "loss": 0.5538, + "step": 3100 + }, + { + "epoch": 0.12796071634893125, + "grad_norm": 3.9548251061064557, + "learning_rate": 2.925190692468475e-06, + "loss": 0.5181, + "step": 3101 + }, + { + "epoch": 0.1280019806882892, + "grad_norm": 2.701226063086153, + "learning_rate": 2.9251281587206476e-06, + "loss": 0.5683, + "step": 3102 + }, + { + "epoch": 0.1280432450276471, + "grad_norm": 18.73788525624525, + "learning_rate": 2.925065599516466e-06, + "loss": 0.5243, + "step": 3103 + }, + { + "epoch": 0.12808450936700502, + "grad_norm": 4.696079420689677, + "learning_rate": 2.925003014857048e-06, + "loss": 0.511, + "step": 3104 + }, + { + "epoch": 0.12812577370636297, + "grad_norm": 4.99628754624696, + "learning_rate": 2.9249404047435113e-06, + "loss": 0.6081, + "step": 3105 + }, + { + "epoch": 0.12816703804572088, + "grad_norm": 3.6316382007839985, + "learning_rate": 2.9248777691769743e-06, + "loss": 0.5915, + "step": 3106 + }, + { + "epoch": 0.12820830238507883, + "grad_norm": 4.130723504381971, + "learning_rate": 2.9248151081585558e-06, + "loss": 0.6055, + "step": 3107 + }, + { + "epoch": 0.12824956672443674, + "grad_norm": 2.6716035332199417, + "learning_rate": 2.924752421689375e-06, + "loss": 0.5848, + 
"step": 3108 + }, + { + "epoch": 0.12829083106379466, + "grad_norm": 5.365055402103864, + "learning_rate": 2.924689709770552e-06, + "loss": 0.561, + "step": 3109 + }, + { + "epoch": 0.1283320954031526, + "grad_norm": 6.057327417641684, + "learning_rate": 2.9246269724032067e-06, + "loss": 0.5593, + "step": 3110 + }, + { + "epoch": 0.12837335974251052, + "grad_norm": 31.741477395744674, + "learning_rate": 2.92456420958846e-06, + "loss": 0.6198, + "step": 3111 + }, + { + "epoch": 0.12841462408186846, + "grad_norm": 5.911048407184479, + "learning_rate": 2.924501421327433e-06, + "loss": 0.5393, + "step": 3112 + }, + { + "epoch": 0.12845588842122638, + "grad_norm": 14.36545035766113, + "learning_rate": 2.9244386076212464e-06, + "loss": 0.5994, + "step": 3113 + }, + { + "epoch": 0.1284971527605843, + "grad_norm": 3.5620036139053597, + "learning_rate": 2.9243757684710233e-06, + "loss": 0.5493, + "step": 3114 + }, + { + "epoch": 0.12853841709994224, + "grad_norm": 3.644850444249849, + "learning_rate": 2.9243129038778853e-06, + "loss": 0.5424, + "step": 3115 + }, + { + "epoch": 0.12857968143930015, + "grad_norm": 2.996636074806126, + "learning_rate": 2.924250013842956e-06, + "loss": 0.563, + "step": 3116 + }, + { + "epoch": 0.1286209457786581, + "grad_norm": 3.7073533315923717, + "learning_rate": 2.9241870983673585e-06, + "loss": 0.5616, + "step": 3117 + }, + { + "epoch": 0.128662210118016, + "grad_norm": 4.389839715162769, + "learning_rate": 2.924124157452217e-06, + "loss": 0.5502, + "step": 3118 + }, + { + "epoch": 0.12870347445737393, + "grad_norm": 3.6742468586407764, + "learning_rate": 2.924061191098655e-06, + "loss": 0.5452, + "step": 3119 + }, + { + "epoch": 0.12874473879673187, + "grad_norm": 3.7774766084563525, + "learning_rate": 2.9239981993077976e-06, + "loss": 0.6016, + "step": 3120 + }, + { + "epoch": 0.1287860031360898, + "grad_norm": 7.511213634328951, + "learning_rate": 2.9239351820807705e-06, + "loss": 0.623, + "step": 3121 + }, + { + "epoch": 0.1288272674754477, + "grad_norm": 11.020498793149882, + "learning_rate": 2.9238721394186985e-06, + "loss": 0.6082, + "step": 3122 + }, + { + "epoch": 0.12886853181480565, + "grad_norm": 3.054710187153351, + "learning_rate": 2.923809071322708e-06, + "loss": 0.5522, + "step": 3123 + }, + { + "epoch": 0.12890979615416356, + "grad_norm": 5.079942426404649, + "learning_rate": 2.9237459777939264e-06, + "loss": 0.5788, + "step": 3124 + }, + { + "epoch": 0.1289510604935215, + "grad_norm": 3.7864227624674944, + "learning_rate": 2.9236828588334795e-06, + "loss": 0.6018, + "step": 3125 + }, + { + "epoch": 0.12899232483287942, + "grad_norm": 6.03031567405362, + "learning_rate": 2.923619714442495e-06, + "loss": 0.5449, + "step": 3126 + }, + { + "epoch": 0.12903358917223734, + "grad_norm": 3.933490612660886, + "learning_rate": 2.9235565446221014e-06, + "loss": 0.5649, + "step": 3127 + }, + { + "epoch": 0.12907485351159528, + "grad_norm": 3.357788623006028, + "learning_rate": 2.9234933493734267e-06, + "loss": 0.5629, + "step": 3128 + }, + { + "epoch": 0.1291161178509532, + "grad_norm": 3.935905897884016, + "learning_rate": 2.9234301286975997e-06, + "loss": 0.5488, + "step": 3129 + }, + { + "epoch": 0.12915738219031114, + "grad_norm": 2.787226399996139, + "learning_rate": 2.9233668825957494e-06, + "loss": 0.5095, + "step": 3130 + }, + { + "epoch": 0.12919864652966906, + "grad_norm": 9.60923295968991, + "learning_rate": 2.923303611069006e-06, + "loss": 0.5597, + "step": 3131 + }, + { + "epoch": 0.12923991086902697, + "grad_norm": 3.2386970010127163, + 
"learning_rate": 2.9232403141185e-06, + "loss": 0.5799, + "step": 3132 + }, + { + "epoch": 0.12928117520838492, + "grad_norm": 3.9895729035518817, + "learning_rate": 2.9231769917453614e-06, + "loss": 0.5955, + "step": 3133 + }, + { + "epoch": 0.12932243954774283, + "grad_norm": 8.639083821432656, + "learning_rate": 2.9231136439507213e-06, + "loss": 0.6108, + "step": 3134 + }, + { + "epoch": 0.12936370388710078, + "grad_norm": 3.0229972979551856, + "learning_rate": 2.9230502707357115e-06, + "loss": 0.5884, + "step": 3135 + }, + { + "epoch": 0.1294049682264587, + "grad_norm": 2.7720118987575204, + "learning_rate": 2.922986872101464e-06, + "loss": 0.5744, + "step": 3136 + }, + { + "epoch": 0.1294462325658166, + "grad_norm": 4.410366909129751, + "learning_rate": 2.922923448049111e-06, + "loss": 0.5796, + "step": 3137 + }, + { + "epoch": 0.12948749690517455, + "grad_norm": 2.993651466795035, + "learning_rate": 2.9228599985797855e-06, + "loss": 0.6174, + "step": 3138 + }, + { + "epoch": 0.12952876124453247, + "grad_norm": 15.741662846559043, + "learning_rate": 2.9227965236946216e-06, + "loss": 0.582, + "step": 3139 + }, + { + "epoch": 0.1295700255838904, + "grad_norm": 3.427415375895973, + "learning_rate": 2.922733023394752e-06, + "loss": 0.5695, + "step": 3140 + }, + { + "epoch": 0.12961128992324833, + "grad_norm": 3.253504459559392, + "learning_rate": 2.9226694976813116e-06, + "loss": 0.5896, + "step": 3141 + }, + { + "epoch": 0.12965255426260625, + "grad_norm": 4.495844448172313, + "learning_rate": 2.922605946555435e-06, + "loss": 0.5366, + "step": 3142 + }, + { + "epoch": 0.1296938186019642, + "grad_norm": 3.215973291769538, + "learning_rate": 2.922542370018257e-06, + "loss": 0.6034, + "step": 3143 + }, + { + "epoch": 0.1297350829413221, + "grad_norm": 4.0336342752516305, + "learning_rate": 2.9224787680709145e-06, + "loss": 0.6286, + "step": 3144 + }, + { + "epoch": 0.12977634728068005, + "grad_norm": 3.4555947863553245, + "learning_rate": 2.922415140714542e-06, + "loss": 0.5699, + "step": 3145 + }, + { + "epoch": 0.12981761162003796, + "grad_norm": 4.126044346917905, + "learning_rate": 2.922351487950277e-06, + "loss": 0.6084, + "step": 3146 + }, + { + "epoch": 0.12985887595939588, + "grad_norm": 13.167450159560008, + "learning_rate": 2.922287809779256e-06, + "loss": 0.6252, + "step": 3147 + }, + { + "epoch": 0.12990014029875382, + "grad_norm": 4.400892030651847, + "learning_rate": 2.922224106202617e-06, + "loss": 0.5849, + "step": 3148 + }, + { + "epoch": 0.12994140463811174, + "grad_norm": 3.633256307529518, + "learning_rate": 2.9221603772214973e-06, + "loss": 0.5766, + "step": 3149 + }, + { + "epoch": 0.12998266897746968, + "grad_norm": 5.663434768211215, + "learning_rate": 2.922096622837036e-06, + "loss": 0.619, + "step": 3150 + }, + { + "epoch": 0.1300239333168276, + "grad_norm": 3.1521202572382387, + "learning_rate": 2.9220328430503713e-06, + "loss": 0.5378, + "step": 3151 + }, + { + "epoch": 0.13006519765618552, + "grad_norm": 6.178461578545461, + "learning_rate": 2.9219690378626424e-06, + "loss": 0.6619, + "step": 3152 + }, + { + "epoch": 0.13010646199554346, + "grad_norm": 3.8757170102512712, + "learning_rate": 2.921905207274989e-06, + "loss": 0.5836, + "step": 3153 + }, + { + "epoch": 0.13014772633490138, + "grad_norm": 3.1097078945049086, + "learning_rate": 2.9218413512885522e-06, + "loss": 0.5574, + "step": 3154 + }, + { + "epoch": 0.1301889906742593, + "grad_norm": 3.6452977817452483, + "learning_rate": 2.921777469904472e-06, + "loss": 0.5979, + "step": 3155 + }, + { + "epoch": 
0.13023025501361724, + "grad_norm": 5.551677164877332, + "learning_rate": 2.9217135631238892e-06, + "loss": 0.5985, + "step": 3156 + }, + { + "epoch": 0.13027151935297515, + "grad_norm": 3.5370340692728663, + "learning_rate": 2.9216496309479454e-06, + "loss": 0.5674, + "step": 3157 + }, + { + "epoch": 0.1303127836923331, + "grad_norm": 2.5601768387047463, + "learning_rate": 2.9215856733777828e-06, + "loss": 0.5049, + "step": 3158 + }, + { + "epoch": 0.130354048031691, + "grad_norm": 3.1121727012669003, + "learning_rate": 2.921521690414544e-06, + "loss": 0.5524, + "step": 3159 + }, + { + "epoch": 0.13039531237104893, + "grad_norm": 10.081104742934908, + "learning_rate": 2.921457682059372e-06, + "loss": 0.5471, + "step": 3160 + }, + { + "epoch": 0.13043657671040687, + "grad_norm": 4.755832142379231, + "learning_rate": 2.9213936483134093e-06, + "loss": 0.5519, + "step": 3161 + }, + { + "epoch": 0.1304778410497648, + "grad_norm": 4.921080109677595, + "learning_rate": 2.9213295891778002e-06, + "loss": 0.6519, + "step": 3162 + }, + { + "epoch": 0.13051910538912273, + "grad_norm": 4.418619019287955, + "learning_rate": 2.9212655046536893e-06, + "loss": 0.5439, + "step": 3163 + }, + { + "epoch": 0.13056036972848065, + "grad_norm": 2.456608186385367, + "learning_rate": 2.921201394742221e-06, + "loss": 0.591, + "step": 3164 + }, + { + "epoch": 0.13060163406783856, + "grad_norm": 3.6513860891600975, + "learning_rate": 2.9211372594445403e-06, + "loss": 0.6256, + "step": 3165 + }, + { + "epoch": 0.1306428984071965, + "grad_norm": 7.202440932175399, + "learning_rate": 2.9210730987617934e-06, + "loss": 0.5989, + "step": 3166 + }, + { + "epoch": 0.13068416274655442, + "grad_norm": 3.6773987212446424, + "learning_rate": 2.921008912695126e-06, + "loss": 0.5542, + "step": 3167 + }, + { + "epoch": 0.13072542708591237, + "grad_norm": 3.258561993946731, + "learning_rate": 2.920944701245684e-06, + "loss": 0.593, + "step": 3168 + }, + { + "epoch": 0.13076669142527028, + "grad_norm": 3.782954442308292, + "learning_rate": 2.9208804644146156e-06, + "loss": 0.5481, + "step": 3169 + }, + { + "epoch": 0.1308079557646282, + "grad_norm": 4.412932524033493, + "learning_rate": 2.9208162022030674e-06, + "loss": 0.5324, + "step": 3170 + }, + { + "epoch": 0.13084922010398614, + "grad_norm": 8.14503849568935, + "learning_rate": 2.9207519146121875e-06, + "loss": 0.5443, + "step": 3171 + }, + { + "epoch": 0.13089048444334406, + "grad_norm": 6.17401163722345, + "learning_rate": 2.920687601643124e-06, + "loss": 0.5303, + "step": 3172 + }, + { + "epoch": 0.130931748782702, + "grad_norm": 10.95974215228783, + "learning_rate": 2.9206232632970263e-06, + "loss": 0.5945, + "step": 3173 + }, + { + "epoch": 0.13097301312205992, + "grad_norm": 4.364289251073342, + "learning_rate": 2.9205588995750428e-06, + "loss": 0.5605, + "step": 3174 + }, + { + "epoch": 0.13101427746141783, + "grad_norm": 2.850803613360053, + "learning_rate": 2.9204945104783238e-06, + "loss": 0.5563, + "step": 3175 + }, + { + "epoch": 0.13105554180077578, + "grad_norm": 12.75910156149929, + "learning_rate": 2.9204300960080192e-06, + "loss": 0.564, + "step": 3176 + }, + { + "epoch": 0.1310968061401337, + "grad_norm": 3.6001466256654524, + "learning_rate": 2.9203656561652802e-06, + "loss": 0.5906, + "step": 3177 + }, + { + "epoch": 0.13113807047949164, + "grad_norm": 3.436663400087807, + "learning_rate": 2.920301190951257e-06, + "loss": 0.5083, + "step": 3178 + }, + { + "epoch": 0.13117933481884955, + "grad_norm": 2.9360031839643397, + "learning_rate": 
2.9202367003671017e-06, + "loss": 0.5479, + "step": 3179 + }, + { + "epoch": 0.13122059915820747, + "grad_norm": 41.661826771428025, + "learning_rate": 2.9201721844139657e-06, + "loss": 0.5582, + "step": 3180 + }, + { + "epoch": 0.1312618634975654, + "grad_norm": 253.7777919581542, + "learning_rate": 2.920107643093002e-06, + "loss": 0.551, + "step": 3181 + }, + { + "epoch": 0.13130312783692333, + "grad_norm": 5.9274995715633, + "learning_rate": 2.9200430764053633e-06, + "loss": 0.5371, + "step": 3182 + }, + { + "epoch": 0.13134439217628127, + "grad_norm": 4.986548949172771, + "learning_rate": 2.919978484352203e-06, + "loss": 0.5335, + "step": 3183 + }, + { + "epoch": 0.1313856565156392, + "grad_norm": 9.998972219659853, + "learning_rate": 2.919913866934674e-06, + "loss": 0.5982, + "step": 3184 + }, + { + "epoch": 0.1314269208549971, + "grad_norm": 2.624883320865526, + "learning_rate": 2.919849224153932e-06, + "loss": 0.5772, + "step": 3185 + }, + { + "epoch": 0.13146818519435505, + "grad_norm": 2.7248116255866313, + "learning_rate": 2.9197845560111303e-06, + "loss": 0.5245, + "step": 3186 + }, + { + "epoch": 0.13150944953371296, + "grad_norm": 4.278174560897937, + "learning_rate": 2.9197198625074253e-06, + "loss": 0.5695, + "step": 3187 + }, + { + "epoch": 0.13155071387307088, + "grad_norm": 2.300184820547701, + "learning_rate": 2.919655143643972e-06, + "loss": 0.535, + "step": 3188 + }, + { + "epoch": 0.13159197821242882, + "grad_norm": 3.433298293413415, + "learning_rate": 2.9195903994219257e-06, + "loss": 0.5981, + "step": 3189 + }, + { + "epoch": 0.13163324255178674, + "grad_norm": 2.834181861537045, + "learning_rate": 2.9195256298424444e-06, + "loss": 0.5904, + "step": 3190 + }, + { + "epoch": 0.13167450689114468, + "grad_norm": 3.9673946405699674, + "learning_rate": 2.9194608349066837e-06, + "loss": 0.6164, + "step": 3191 + }, + { + "epoch": 0.1317157712305026, + "grad_norm": 2.2251132552658084, + "learning_rate": 2.9193960146158015e-06, + "loss": 0.5411, + "step": 3192 + }, + { + "epoch": 0.13175703556986051, + "grad_norm": 9.73229501936738, + "learning_rate": 2.9193311689709557e-06, + "loss": 0.5618, + "step": 3193 + }, + { + "epoch": 0.13179829990921846, + "grad_norm": 1.8552007072509886, + "learning_rate": 2.9192662979733054e-06, + "loss": 0.5065, + "step": 3194 + }, + { + "epoch": 0.13183956424857637, + "grad_norm": 7.594515975605915, + "learning_rate": 2.9192014016240077e-06, + "loss": 0.5835, + "step": 3195 + }, + { + "epoch": 0.13188082858793432, + "grad_norm": 4.686483629282592, + "learning_rate": 2.919136479924223e-06, + "loss": 0.6172, + "step": 3196 + }, + { + "epoch": 0.13192209292729223, + "grad_norm": 2.668677101474897, + "learning_rate": 2.91907153287511e-06, + "loss": 0.5022, + "step": 3197 + }, + { + "epoch": 0.13196335726665015, + "grad_norm": 2.6837060723583863, + "learning_rate": 2.9190065604778308e-06, + "loss": 0.5906, + "step": 3198 + }, + { + "epoch": 0.1320046216060081, + "grad_norm": 3.2715910528529215, + "learning_rate": 2.9189415627335436e-06, + "loss": 0.602, + "step": 3199 + }, + { + "epoch": 0.132045885945366, + "grad_norm": 2.7521382518213215, + "learning_rate": 2.9188765396434103e-06, + "loss": 0.5433, + "step": 3200 + }, + { + "epoch": 0.13208715028472395, + "grad_norm": 15.53982492019838, + "learning_rate": 2.918811491208593e-06, + "loss": 0.5749, + "step": 3201 + }, + { + "epoch": 0.13212841462408187, + "grad_norm": 2.724930457530622, + "learning_rate": 2.918746417430253e-06, + "loss": 0.5674, + "step": 3202 + }, + { + "epoch": 
0.13216967896343979, + "grad_norm": 5.041140615331662, + "learning_rate": 2.918681318309553e-06, + "loss": 0.5936, + "step": 3203 + }, + { + "epoch": 0.13221094330279773, + "grad_norm": 5.815659270534979, + "learning_rate": 2.918616193847655e-06, + "loss": 0.6092, + "step": 3204 + }, + { + "epoch": 0.13225220764215564, + "grad_norm": 4.504342106267951, + "learning_rate": 2.918551044045724e-06, + "loss": 0.6545, + "step": 3205 + }, + { + "epoch": 0.1322934719815136, + "grad_norm": 3.2283481242889485, + "learning_rate": 2.918485868904922e-06, + "loss": 0.5491, + "step": 3206 + }, + { + "epoch": 0.1323347363208715, + "grad_norm": 5.892136638011336, + "learning_rate": 2.918420668426414e-06, + "loss": 0.5449, + "step": 3207 + }, + { + "epoch": 0.13237600066022942, + "grad_norm": 10.714427265109578, + "learning_rate": 2.9183554426113645e-06, + "loss": 0.5559, + "step": 3208 + }, + { + "epoch": 0.13241726499958736, + "grad_norm": 3.6024387791571404, + "learning_rate": 2.918290191460939e-06, + "loss": 0.5955, + "step": 3209 + }, + { + "epoch": 0.13245852933894528, + "grad_norm": 3.3552727283575527, + "learning_rate": 2.918224914976302e-06, + "loss": 0.5002, + "step": 3210 + }, + { + "epoch": 0.13249979367830322, + "grad_norm": 3.4364586349773045, + "learning_rate": 2.9181596131586207e-06, + "loss": 0.5403, + "step": 3211 + }, + { + "epoch": 0.13254105801766114, + "grad_norm": 4.935358117264414, + "learning_rate": 2.9180942860090606e-06, + "loss": 0.5643, + "step": 3212 + }, + { + "epoch": 0.13258232235701906, + "grad_norm": 5.178062936509352, + "learning_rate": 2.9180289335287888e-06, + "loss": 0.5418, + "step": 3213 + }, + { + "epoch": 0.132623586696377, + "grad_norm": 6.814793774298483, + "learning_rate": 2.9179635557189733e-06, + "loss": 0.5435, + "step": 3214 + }, + { + "epoch": 0.13266485103573492, + "grad_norm": 8.332489144386445, + "learning_rate": 2.9178981525807814e-06, + "loss": 0.5518, + "step": 3215 + }, + { + "epoch": 0.13270611537509283, + "grad_norm": 4.28947523974979, + "learning_rate": 2.917832724115382e-06, + "loss": 0.5614, + "step": 3216 + }, + { + "epoch": 0.13274737971445078, + "grad_norm": 3.499669281118748, + "learning_rate": 2.9177672703239426e-06, + "loss": 0.6024, + "step": 3217 + }, + { + "epoch": 0.1327886440538087, + "grad_norm": 3.3313258535572534, + "learning_rate": 2.9177017912076335e-06, + "loss": 0.6101, + "step": 3218 + }, + { + "epoch": 0.13282990839316663, + "grad_norm": 3.682232511059502, + "learning_rate": 2.917636286767623e-06, + "loss": 0.5891, + "step": 3219 + }, + { + "epoch": 0.13287117273252455, + "grad_norm": 3.2026460029573944, + "learning_rate": 2.917570757005083e-06, + "loss": 0.5558, + "step": 3220 + }, + { + "epoch": 0.13291243707188247, + "grad_norm": 4.1962527788212824, + "learning_rate": 2.9175052019211828e-06, + "loss": 0.6272, + "step": 3221 + }, + { + "epoch": 0.1329537014112404, + "grad_norm": 19.692705226294, + "learning_rate": 2.9174396215170936e-06, + "loss": 0.5331, + "step": 3222 + }, + { + "epoch": 0.13299496575059833, + "grad_norm": 10.356496165336965, + "learning_rate": 2.9173740157939868e-06, + "loss": 0.5614, + "step": 3223 + }, + { + "epoch": 0.13303623008995627, + "grad_norm": 3.379625773729601, + "learning_rate": 2.9173083847530342e-06, + "loss": 0.5632, + "step": 3224 + }, + { + "epoch": 0.13307749442931419, + "grad_norm": 3.8849627323006337, + "learning_rate": 2.9172427283954083e-06, + "loss": 0.5458, + "step": 3225 + }, + { + "epoch": 0.1331187587686721, + "grad_norm": 4.234491816440932, + "learning_rate": 
2.917177046722282e-06, + "loss": 0.5744, + "step": 3226 + }, + { + "epoch": 0.13316002310803005, + "grad_norm": 5.339880791590832, + "learning_rate": 2.9171113397348283e-06, + "loss": 0.563, + "step": 3227 + }, + { + "epoch": 0.13320128744738796, + "grad_norm": 3.48264019358461, + "learning_rate": 2.9170456074342214e-06, + "loss": 0.622, + "step": 3228 + }, + { + "epoch": 0.1332425517867459, + "grad_norm": 2.564917426277787, + "learning_rate": 2.9169798498216345e-06, + "loss": 0.5719, + "step": 3229 + }, + { + "epoch": 0.13328381612610382, + "grad_norm": 3.082459796796162, + "learning_rate": 2.9169140668982437e-06, + "loss": 0.624, + "step": 3230 + }, + { + "epoch": 0.13332508046546174, + "grad_norm": 2.760654461923815, + "learning_rate": 2.9168482586652217e-06, + "loss": 0.5554, + "step": 3231 + }, + { + "epoch": 0.13336634480481968, + "grad_norm": 3.3282775419878847, + "learning_rate": 2.9167824251237463e-06, + "loss": 0.5633, + "step": 3232 + }, + { + "epoch": 0.1334076091441776, + "grad_norm": 24.54077609930086, + "learning_rate": 2.9167165662749923e-06, + "loss": 0.5896, + "step": 3233 + }, + { + "epoch": 0.13344887348353554, + "grad_norm": 2.1519559036321567, + "learning_rate": 2.916650682120137e-06, + "loss": 0.5706, + "step": 3234 + }, + { + "epoch": 0.13349013782289346, + "grad_norm": 8.689008021138376, + "learning_rate": 2.9165847726603553e-06, + "loss": 0.5976, + "step": 3235 + }, + { + "epoch": 0.13353140216225137, + "grad_norm": 2.637216007348692, + "learning_rate": 2.9165188378968266e-06, + "loss": 0.5884, + "step": 3236 + }, + { + "epoch": 0.13357266650160932, + "grad_norm": 2.6176412386085564, + "learning_rate": 2.916452877830728e-06, + "loss": 0.5709, + "step": 3237 + }, + { + "epoch": 0.13361393084096723, + "grad_norm": 2.7440583247493078, + "learning_rate": 2.9163868924632373e-06, + "loss": 0.548, + "step": 3238 + }, + { + "epoch": 0.13365519518032518, + "grad_norm": 9.255750845751384, + "learning_rate": 2.9163208817955335e-06, + "loss": 0.5515, + "step": 3239 + }, + { + "epoch": 0.1336964595196831, + "grad_norm": 7.196434321443492, + "learning_rate": 2.9162548458287954e-06, + "loss": 0.5204, + "step": 3240 + }, + { + "epoch": 0.133737723859041, + "grad_norm": 2.3391758855158793, + "learning_rate": 2.9161887845642032e-06, + "loss": 0.5624, + "step": 3241 + }, + { + "epoch": 0.13377898819839895, + "grad_norm": 2.455894159220836, + "learning_rate": 2.9161226980029365e-06, + "loss": 0.5885, + "step": 3242 + }, + { + "epoch": 0.13382025253775687, + "grad_norm": 3.323528849122468, + "learning_rate": 2.9160565861461756e-06, + "loss": 0.5739, + "step": 3243 + }, + { + "epoch": 0.1338615168771148, + "grad_norm": 6.416215744316517, + "learning_rate": 2.9159904489951017e-06, + "loss": 0.6151, + "step": 3244 + }, + { + "epoch": 0.13390278121647273, + "grad_norm": 4.889533658672917, + "learning_rate": 2.9159242865508964e-06, + "loss": 0.5394, + "step": 3245 + }, + { + "epoch": 0.13394404555583064, + "grad_norm": 2.55254220519386, + "learning_rate": 2.915858098814741e-06, + "loss": 0.5957, + "step": 3246 + }, + { + "epoch": 0.1339853098951886, + "grad_norm": 6.3368773093929285, + "learning_rate": 2.915791885787818e-06, + "loss": 0.602, + "step": 3247 + }, + { + "epoch": 0.1340265742345465, + "grad_norm": 2.9995559683327975, + "learning_rate": 2.91572564747131e-06, + "loss": 0.5595, + "step": 3248 + }, + { + "epoch": 0.13406783857390442, + "grad_norm": 5.885439093128451, + "learning_rate": 2.915659383866401e-06, + "loss": 0.5837, + "step": 3249 + }, + { + "epoch": 0.13410910291326236, 
+ "grad_norm": 2.288794204632721, + "learning_rate": 2.915593094974273e-06, + "loss": 0.5245, + "step": 3250 + }, + { + "epoch": 0.13415036725262028, + "grad_norm": 4.946903764584157, + "learning_rate": 2.9155267807961115e-06, + "loss": 0.5676, + "step": 3251 + }, + { + "epoch": 0.13419163159197822, + "grad_norm": 3.314511146544617, + "learning_rate": 2.915460441333101e-06, + "loss": 0.5472, + "step": 3252 + }, + { + "epoch": 0.13423289593133614, + "grad_norm": 5.708305902600396, + "learning_rate": 2.9153940765864257e-06, + "loss": 0.5745, + "step": 3253 + }, + { + "epoch": 0.13427416027069405, + "grad_norm": 3.586502664807834, + "learning_rate": 2.915327686557271e-06, + "loss": 0.6125, + "step": 3254 + }, + { + "epoch": 0.134315424610052, + "grad_norm": 13.25355810775844, + "learning_rate": 2.9152612712468238e-06, + "loss": 0.5613, + "step": 3255 + }, + { + "epoch": 0.1343566889494099, + "grad_norm": 2.9312986894417272, + "learning_rate": 2.91519483065627e-06, + "loss": 0.5483, + "step": 3256 + }, + { + "epoch": 0.13439795328876786, + "grad_norm": 5.988272923207493, + "learning_rate": 2.9151283647867956e-06, + "loss": 0.5737, + "step": 3257 + }, + { + "epoch": 0.13443921762812577, + "grad_norm": 3.3010203102838123, + "learning_rate": 2.915061873639589e-06, + "loss": 0.531, + "step": 3258 + }, + { + "epoch": 0.1344804819674837, + "grad_norm": 4.730033803787664, + "learning_rate": 2.914995357215837e-06, + "loss": 0.4972, + "step": 3259 + }, + { + "epoch": 0.13452174630684163, + "grad_norm": 4.469574153101145, + "learning_rate": 2.914928815516729e-06, + "loss": 0.516, + "step": 3260 + }, + { + "epoch": 0.13456301064619955, + "grad_norm": 16.719988431287764, + "learning_rate": 2.9148622485434518e-06, + "loss": 0.5611, + "step": 3261 + }, + { + "epoch": 0.1346042749855575, + "grad_norm": 3.56210101698892, + "learning_rate": 2.914795656297196e-06, + "loss": 0.5137, + "step": 3262 + }, + { + "epoch": 0.1346455393249154, + "grad_norm": 3.7664760591104205, + "learning_rate": 2.91472903877915e-06, + "loss": 0.5469, + "step": 3263 + }, + { + "epoch": 0.13468680366427332, + "grad_norm": 8.246662081558373, + "learning_rate": 2.9146623959905045e-06, + "loss": 0.6336, + "step": 3264 + }, + { + "epoch": 0.13472806800363127, + "grad_norm": 3.7386644799203923, + "learning_rate": 2.9145957279324497e-06, + "loss": 0.6018, + "step": 3265 + }, + { + "epoch": 0.13476933234298918, + "grad_norm": 3.8283610698544415, + "learning_rate": 2.914529034606176e-06, + "loss": 0.5804, + "step": 3266 + }, + { + "epoch": 0.13481059668234713, + "grad_norm": 3.6508866217439433, + "learning_rate": 2.9144623160128756e-06, + "loss": 0.5418, + "step": 3267 + }, + { + "epoch": 0.13485186102170504, + "grad_norm": 4.229259582269926, + "learning_rate": 2.91439557215374e-06, + "loss": 0.5423, + "step": 3268 + }, + { + "epoch": 0.13489312536106296, + "grad_norm": 3.004021744356206, + "learning_rate": 2.9143288030299606e-06, + "loss": 0.5755, + "step": 3269 + }, + { + "epoch": 0.1349343897004209, + "grad_norm": 2.709596560734456, + "learning_rate": 2.914262008642731e-06, + "loss": 0.573, + "step": 3270 + }, + { + "epoch": 0.13497565403977882, + "grad_norm": 4.691426559413002, + "learning_rate": 2.914195188993244e-06, + "loss": 0.5981, + "step": 3271 + }, + { + "epoch": 0.13501691837913676, + "grad_norm": 26.077972057055163, + "learning_rate": 2.914128344082693e-06, + "loss": 0.5265, + "step": 3272 + }, + { + "epoch": 0.13505818271849468, + "grad_norm": 12.239533078145026, + "learning_rate": 2.9140614739122724e-06, + "loss": 0.5516, + 
"step": 3273 + }, + { + "epoch": 0.1350994470578526, + "grad_norm": 21.032970479961037, + "learning_rate": 2.9139945784831764e-06, + "loss": 0.5514, + "step": 3274 + }, + { + "epoch": 0.13514071139721054, + "grad_norm": 3.6454263303941947, + "learning_rate": 2.9139276577965998e-06, + "loss": 0.5641, + "step": 3275 + }, + { + "epoch": 0.13518197573656845, + "grad_norm": 3.2011016660361706, + "learning_rate": 2.913860711853738e-06, + "loss": 0.4972, + "step": 3276 + }, + { + "epoch": 0.13522324007592637, + "grad_norm": 4.877272835701043, + "learning_rate": 2.9137937406557874e-06, + "loss": 0.5914, + "step": 3277 + }, + { + "epoch": 0.13526450441528431, + "grad_norm": 3.9025473623131473, + "learning_rate": 2.9137267442039437e-06, + "loss": 0.5576, + "step": 3278 + }, + { + "epoch": 0.13530576875464223, + "grad_norm": 7.115469418168469, + "learning_rate": 2.9136597224994034e-06, + "loss": 0.5676, + "step": 3279 + }, + { + "epoch": 0.13534703309400017, + "grad_norm": 3.506339253936747, + "learning_rate": 2.9135926755433646e-06, + "loss": 0.5662, + "step": 3280 + }, + { + "epoch": 0.1353882974333581, + "grad_norm": 3.287965391805822, + "learning_rate": 2.9135256033370233e-06, + "loss": 0.575, + "step": 3281 + }, + { + "epoch": 0.135429561772716, + "grad_norm": 3.3495388015767475, + "learning_rate": 2.91345850588158e-06, + "loss": 0.5567, + "step": 3282 + }, + { + "epoch": 0.13547082611207395, + "grad_norm": 2.937604662379353, + "learning_rate": 2.9133913831782307e-06, + "loss": 0.5544, + "step": 3283 + }, + { + "epoch": 0.13551209045143187, + "grad_norm": 5.383661056446271, + "learning_rate": 2.913324235228176e-06, + "loss": 0.5352, + "step": 3284 + }, + { + "epoch": 0.1355533547907898, + "grad_norm": 4.600068762575951, + "learning_rate": 2.913257062032615e-06, + "loss": 0.5685, + "step": 3285 + }, + { + "epoch": 0.13559461913014773, + "grad_norm": 2.983939984123444, + "learning_rate": 2.9131898635927474e-06, + "loss": 0.5987, + "step": 3286 + }, + { + "epoch": 0.13563588346950564, + "grad_norm": 3.196413479427278, + "learning_rate": 2.9131226399097733e-06, + "loss": 0.5623, + "step": 3287 + }, + { + "epoch": 0.13567714780886359, + "grad_norm": 2.3758346819456486, + "learning_rate": 2.9130553909848938e-06, + "loss": 0.5355, + "step": 3288 + }, + { + "epoch": 0.1357184121482215, + "grad_norm": 2.358730515426238, + "learning_rate": 2.9129881168193104e-06, + "loss": 0.5763, + "step": 3289 + }, + { + "epoch": 0.13575967648757944, + "grad_norm": 3.8315442690912858, + "learning_rate": 2.9129208174142242e-06, + "loss": 0.5773, + "step": 3290 + }, + { + "epoch": 0.13580094082693736, + "grad_norm": 5.650363127274728, + "learning_rate": 2.912853492770838e-06, + "loss": 0.5913, + "step": 3291 + }, + { + "epoch": 0.13584220516629528, + "grad_norm": 8.124828131941122, + "learning_rate": 2.912786142890354e-06, + "loss": 0.5957, + "step": 3292 + }, + { + "epoch": 0.13588346950565322, + "grad_norm": 2.28058817469999, + "learning_rate": 2.9127187677739747e-06, + "loss": 0.5278, + "step": 3293 + }, + { + "epoch": 0.13592473384501114, + "grad_norm": 4.306734006620138, + "learning_rate": 2.9126513674229044e-06, + "loss": 0.6154, + "step": 3294 + }, + { + "epoch": 0.13596599818436908, + "grad_norm": 2.718598490842655, + "learning_rate": 2.9125839418383466e-06, + "loss": 0.5465, + "step": 3295 + }, + { + "epoch": 0.136007262523727, + "grad_norm": 8.389788258275477, + "learning_rate": 2.912516491021506e-06, + "loss": 0.5929, + "step": 3296 + }, + { + "epoch": 0.1360485268630849, + "grad_norm": 4.237508061161302, + 
"learning_rate": 2.9124490149735873e-06, + "loss": 0.5528, + "step": 3297 + }, + { + "epoch": 0.13608979120244286, + "grad_norm": 2.6477661505409116, + "learning_rate": 2.9123815136957957e-06, + "loss": 0.5416, + "step": 3298 + }, + { + "epoch": 0.13613105554180077, + "grad_norm": 9.152341849734189, + "learning_rate": 2.9123139871893373e-06, + "loss": 0.5376, + "step": 3299 + }, + { + "epoch": 0.13617231988115872, + "grad_norm": 3.148077107483742, + "learning_rate": 2.912246435455418e-06, + "loss": 0.5936, + "step": 3300 + }, + { + "epoch": 0.13621358422051663, + "grad_norm": 3.8465188291347796, + "learning_rate": 2.912178858495244e-06, + "loss": 0.5422, + "step": 3301 + }, + { + "epoch": 0.13625484855987455, + "grad_norm": 3.3588984860656264, + "learning_rate": 2.912111256310023e-06, + "loss": 0.542, + "step": 3302 + }, + { + "epoch": 0.1362961128992325, + "grad_norm": 4.470437795262911, + "learning_rate": 2.912043628900963e-06, + "loss": 0.5964, + "step": 3303 + }, + { + "epoch": 0.1363373772385904, + "grad_norm": 6.917288167741323, + "learning_rate": 2.911975976269271e-06, + "loss": 0.5603, + "step": 3304 + }, + { + "epoch": 0.13637864157794835, + "grad_norm": 4.434828592364316, + "learning_rate": 2.911908298416155e-06, + "loss": 0.5716, + "step": 3305 + }, + { + "epoch": 0.13641990591730627, + "grad_norm": 5.979614847408268, + "learning_rate": 2.9118405953428258e-06, + "loss": 0.5596, + "step": 3306 + }, + { + "epoch": 0.13646117025666418, + "grad_norm": 4.423197823505594, + "learning_rate": 2.911772867050492e-06, + "loss": 0.5421, + "step": 3307 + }, + { + "epoch": 0.13650243459602213, + "grad_norm": 4.132391751117675, + "learning_rate": 2.9117051135403624e-06, + "loss": 0.6205, + "step": 3308 + }, + { + "epoch": 0.13654369893538004, + "grad_norm": 38.48792608901185, + "learning_rate": 2.911637334813648e-06, + "loss": 0.5558, + "step": 3309 + }, + { + "epoch": 0.13658496327473796, + "grad_norm": 4.79327089676033, + "learning_rate": 2.9115695308715592e-06, + "loss": 0.5701, + "step": 3310 + }, + { + "epoch": 0.1366262276140959, + "grad_norm": 3.8777230713941644, + "learning_rate": 2.911501701715308e-06, + "loss": 0.6248, + "step": 3311 + }, + { + "epoch": 0.13666749195345382, + "grad_norm": 3.9341439031552565, + "learning_rate": 2.9114338473461043e-06, + "loss": 0.5682, + "step": 3312 + }, + { + "epoch": 0.13670875629281176, + "grad_norm": 4.669907220016936, + "learning_rate": 2.9113659677651622e-06, + "loss": 0.618, + "step": 3313 + }, + { + "epoch": 0.13675002063216968, + "grad_norm": 3.6778133849909307, + "learning_rate": 2.911298062973693e-06, + "loss": 0.6063, + "step": 3314 + }, + { + "epoch": 0.1367912849715276, + "grad_norm": 3.7213457277682953, + "learning_rate": 2.91123013297291e-06, + "loss": 0.5709, + "step": 3315 + }, + { + "epoch": 0.13683254931088554, + "grad_norm": 5.9725502385914275, + "learning_rate": 2.911162177764026e-06, + "loss": 0.5373, + "step": 3316 + }, + { + "epoch": 0.13687381365024345, + "grad_norm": 7.376039032726124, + "learning_rate": 2.9110941973482555e-06, + "loss": 0.571, + "step": 3317 + }, + { + "epoch": 0.1369150779896014, + "grad_norm": 3.4068260475127516, + "learning_rate": 2.9110261917268127e-06, + "loss": 0.5608, + "step": 3318 + }, + { + "epoch": 0.1369563423289593, + "grad_norm": 6.058871741531454, + "learning_rate": 2.9109581609009123e-06, + "loss": 0.6333, + "step": 3319 + }, + { + "epoch": 0.13699760666831723, + "grad_norm": 2.7282422879019497, + "learning_rate": 2.9108901048717693e-06, + "loss": 0.5606, + "step": 3320 + }, + { + "epoch": 
0.13703887100767517, + "grad_norm": 5.860541550050915, + "learning_rate": 2.9108220236405997e-06, + "loss": 0.5243, + "step": 3321 + }, + { + "epoch": 0.1370801353470331, + "grad_norm": 4.236782240916143, + "learning_rate": 2.9107539172086197e-06, + "loss": 0.583, + "step": 3322 + }, + { + "epoch": 0.13712139968639103, + "grad_norm": 4.4760197785237725, + "learning_rate": 2.9106857855770454e-06, + "loss": 0.5955, + "step": 3323 + }, + { + "epoch": 0.13716266402574895, + "grad_norm": 8.334494468118928, + "learning_rate": 2.9106176287470934e-06, + "loss": 0.5769, + "step": 3324 + }, + { + "epoch": 0.13720392836510686, + "grad_norm": 3.0536271449746155, + "learning_rate": 2.9105494467199826e-06, + "loss": 0.5645, + "step": 3325 + }, + { + "epoch": 0.1372451927044648, + "grad_norm": 3.497639533968754, + "learning_rate": 2.9104812394969294e-06, + "loss": 0.5505, + "step": 3326 + }, + { + "epoch": 0.13728645704382272, + "grad_norm": 4.310625749366706, + "learning_rate": 2.910413007079153e-06, + "loss": 0.574, + "step": 3327 + }, + { + "epoch": 0.13732772138318067, + "grad_norm": 2.8145821049435535, + "learning_rate": 2.9103447494678717e-06, + "loss": 0.5566, + "step": 3328 + }, + { + "epoch": 0.13736898572253858, + "grad_norm": 3.6488430312274414, + "learning_rate": 2.910276466664305e-06, + "loss": 0.5919, + "step": 3329 + }, + { + "epoch": 0.1374102500618965, + "grad_norm": 4.141591639662474, + "learning_rate": 2.910208158669673e-06, + "loss": 0.5364, + "step": 3330 + }, + { + "epoch": 0.13745151440125444, + "grad_norm": 3.048581949974748, + "learning_rate": 2.9101398254851955e-06, + "loss": 0.5504, + "step": 3331 + }, + { + "epoch": 0.13749277874061236, + "grad_norm": 6.045941550780148, + "learning_rate": 2.9100714671120928e-06, + "loss": 0.5931, + "step": 3332 + }, + { + "epoch": 0.1375340430799703, + "grad_norm": 2.444910047212411, + "learning_rate": 2.9100030835515865e-06, + "loss": 0.4793, + "step": 3333 + }, + { + "epoch": 0.13757530741932822, + "grad_norm": 6.788326665461611, + "learning_rate": 2.9099346748048982e-06, + "loss": 0.5644, + "step": 3334 + }, + { + "epoch": 0.13761657175868613, + "grad_norm": 4.51726097765853, + "learning_rate": 2.9098662408732484e-06, + "loss": 0.58, + "step": 3335 + }, + { + "epoch": 0.13765783609804408, + "grad_norm": 3.508562202092416, + "learning_rate": 2.9097977817578615e-06, + "loss": 0.5502, + "step": 3336 + }, + { + "epoch": 0.137699100437402, + "grad_norm": 2.855706321970712, + "learning_rate": 2.9097292974599594e-06, + "loss": 0.5391, + "step": 3337 + }, + { + "epoch": 0.1377403647767599, + "grad_norm": 3.4590820765986194, + "learning_rate": 2.909660787980765e-06, + "loss": 0.537, + "step": 3338 + }, + { + "epoch": 0.13778162911611785, + "grad_norm": 10.350790785584957, + "learning_rate": 2.9095922533215026e-06, + "loss": 0.6053, + "step": 3339 + }, + { + "epoch": 0.13782289345547577, + "grad_norm": 4.892383702399208, + "learning_rate": 2.9095236934833968e-06, + "loss": 0.5939, + "step": 3340 + }, + { + "epoch": 0.1378641577948337, + "grad_norm": 3.9359706278110584, + "learning_rate": 2.909455108467671e-06, + "loss": 0.5673, + "step": 3341 + }, + { + "epoch": 0.13790542213419163, + "grad_norm": 9.202763007226336, + "learning_rate": 2.9093864982755517e-06, + "loss": 0.5414, + "step": 3342 + }, + { + "epoch": 0.13794668647354955, + "grad_norm": 2.9231430595462347, + "learning_rate": 2.9093178629082637e-06, + "loss": 0.5364, + "step": 3343 + }, + { + "epoch": 0.1379879508129075, + "grad_norm": 6.237088533299317, + "learning_rate": 
2.909249202367033e-06, + "loss": 0.5446, + "step": 3344 + }, + { + "epoch": 0.1380292151522654, + "grad_norm": 3.1305793540282574, + "learning_rate": 2.9091805166530858e-06, + "loss": 0.5961, + "step": 3345 + }, + { + "epoch": 0.13807047949162335, + "grad_norm": 1.92722941764361, + "learning_rate": 2.9091118057676496e-06, + "loss": 0.5289, + "step": 3346 + }, + { + "epoch": 0.13811174383098127, + "grad_norm": 4.226767036059691, + "learning_rate": 2.909043069711952e-06, + "loss": 0.5426, + "step": 3347 + }, + { + "epoch": 0.13815300817033918, + "grad_norm": 3.0131798932498204, + "learning_rate": 2.9089743084872193e-06, + "loss": 0.5826, + "step": 3348 + }, + { + "epoch": 0.13819427250969712, + "grad_norm": 7.480387719277776, + "learning_rate": 2.908905522094681e-06, + "loss": 0.5948, + "step": 3349 + }, + { + "epoch": 0.13823553684905504, + "grad_norm": 2.7337511457437955, + "learning_rate": 2.9088367105355658e-06, + "loss": 0.588, + "step": 3350 + }, + { + "epoch": 0.13827680118841298, + "grad_norm": 4.321605483357169, + "learning_rate": 2.9087678738111025e-06, + "loss": 0.6725, + "step": 3351 + }, + { + "epoch": 0.1383180655277709, + "grad_norm": 2.374344603644764, + "learning_rate": 2.908699011922521e-06, + "loss": 0.6036, + "step": 3352 + }, + { + "epoch": 0.13835932986712882, + "grad_norm": 4.1912986380468515, + "learning_rate": 2.908630124871051e-06, + "loss": 0.5743, + "step": 3353 + }, + { + "epoch": 0.13840059420648676, + "grad_norm": 2.641157553764381, + "learning_rate": 2.908561212657923e-06, + "loss": 0.6356, + "step": 3354 + }, + { + "epoch": 0.13844185854584468, + "grad_norm": 3.2254165280905696, + "learning_rate": 2.908492275284368e-06, + "loss": 0.5107, + "step": 3355 + }, + { + "epoch": 0.13848312288520262, + "grad_norm": 3.7645244590482876, + "learning_rate": 2.9084233127516175e-06, + "loss": 0.6313, + "step": 3356 + }, + { + "epoch": 0.13852438722456054, + "grad_norm": 3.4144242813290453, + "learning_rate": 2.908354325060903e-06, + "loss": 0.5046, + "step": 3357 + }, + { + "epoch": 0.13856565156391845, + "grad_norm": 2.9021874860245243, + "learning_rate": 2.9082853122134576e-06, + "loss": 0.5125, + "step": 3358 + }, + { + "epoch": 0.1386069159032764, + "grad_norm": 5.007053884514853, + "learning_rate": 2.908216274210514e-06, + "loss": 0.5793, + "step": 3359 + }, + { + "epoch": 0.1386481802426343, + "grad_norm": 11.734810652155849, + "learning_rate": 2.9081472110533038e-06, + "loss": 0.5612, + "step": 3360 + }, + { + "epoch": 0.13868944458199226, + "grad_norm": 3.892401830092631, + "learning_rate": 2.9080781227430624e-06, + "loss": 0.5996, + "step": 3361 + }, + { + "epoch": 0.13873070892135017, + "grad_norm": 2.820843606535375, + "learning_rate": 2.9080090092810234e-06, + "loss": 0.604, + "step": 3362 + }, + { + "epoch": 0.1387719732607081, + "grad_norm": 3.8081003127786732, + "learning_rate": 2.9079398706684205e-06, + "loss": 0.5766, + "step": 3363 + }, + { + "epoch": 0.13881323760006603, + "grad_norm": 3.637383621475172, + "learning_rate": 2.90787070690649e-06, + "loss": 0.5833, + "step": 3364 + }, + { + "epoch": 0.13885450193942395, + "grad_norm": 5.448369717420117, + "learning_rate": 2.9078015179964667e-06, + "loss": 0.5799, + "step": 3365 + }, + { + "epoch": 0.1388957662787819, + "grad_norm": 3.8520786301777976, + "learning_rate": 2.9077323039395862e-06, + "loss": 0.5494, + "step": 3366 + }, + { + "epoch": 0.1389370306181398, + "grad_norm": 15.934325342644211, + "learning_rate": 2.9076630647370855e-06, + "loss": 0.5508, + "step": 3367 + }, + { + "epoch": 
0.13897829495749772, + "grad_norm": 3.887760360657104, + "learning_rate": 2.907593800390201e-06, + "loss": 0.4943, + "step": 3368 + }, + { + "epoch": 0.13901955929685567, + "grad_norm": 2.949340948240689, + "learning_rate": 2.9075245109001693e-06, + "loss": 0.5475, + "step": 3369 + }, + { + "epoch": 0.13906082363621358, + "grad_norm": 2.4052495535064886, + "learning_rate": 2.9074551962682293e-06, + "loss": 0.5434, + "step": 3370 + }, + { + "epoch": 0.1391020879755715, + "grad_norm": 5.1159165637592166, + "learning_rate": 2.9073858564956187e-06, + "loss": 0.5567, + "step": 3371 + }, + { + "epoch": 0.13914335231492944, + "grad_norm": 6.534243677463511, + "learning_rate": 2.9073164915835755e-06, + "loss": 0.5963, + "step": 3372 + }, + { + "epoch": 0.13918461665428736, + "grad_norm": 2.285080627735216, + "learning_rate": 2.9072471015333393e-06, + "loss": 0.5427, + "step": 3373 + }, + { + "epoch": 0.1392258809936453, + "grad_norm": 10.765230121766445, + "learning_rate": 2.90717768634615e-06, + "loss": 0.5871, + "step": 3374 + }, + { + "epoch": 0.13926714533300322, + "grad_norm": 2.2804274556024047, + "learning_rate": 2.9071082460232465e-06, + "loss": 0.5371, + "step": 3375 + }, + { + "epoch": 0.13930840967236113, + "grad_norm": 16.2771522974645, + "learning_rate": 2.9070387805658696e-06, + "loss": 0.5523, + "step": 3376 + }, + { + "epoch": 0.13934967401171908, + "grad_norm": 3.3182154330396565, + "learning_rate": 2.9069692899752603e-06, + "loss": 0.5861, + "step": 3377 + }, + { + "epoch": 0.139390938351077, + "grad_norm": 3.358082946544758, + "learning_rate": 2.90689977425266e-06, + "loss": 0.5696, + "step": 3378 + }, + { + "epoch": 0.13943220269043494, + "grad_norm": 19.960117706229664, + "learning_rate": 2.9068302333993105e-06, + "loss": 0.605, + "step": 3379 + }, + { + "epoch": 0.13947346702979285, + "grad_norm": 5.6887616429728896, + "learning_rate": 2.906760667416453e-06, + "loss": 0.5734, + "step": 3380 + }, + { + "epoch": 0.13951473136915077, + "grad_norm": 3.9625955207512664, + "learning_rate": 2.906691076305331e-06, + "loss": 0.5865, + "step": 3381 + }, + { + "epoch": 0.1395559957085087, + "grad_norm": 2.917711570739164, + "learning_rate": 2.9066214600671877e-06, + "loss": 0.528, + "step": 3382 + }, + { + "epoch": 0.13959726004786663, + "grad_norm": 3.1687578199125332, + "learning_rate": 2.9065518187032662e-06, + "loss": 0.5056, + "step": 3383 + }, + { + "epoch": 0.13963852438722457, + "grad_norm": 2.1923200476369376, + "learning_rate": 2.9064821522148106e-06, + "loss": 0.6435, + "step": 3384 + }, + { + "epoch": 0.1396797887265825, + "grad_norm": 3.3113936499616745, + "learning_rate": 2.906412460603065e-06, + "loss": 0.5844, + "step": 3385 + }, + { + "epoch": 0.1397210530659404, + "grad_norm": 6.54786444607022, + "learning_rate": 2.9063427438692745e-06, + "loss": 0.5985, + "step": 3386 + }, + { + "epoch": 0.13976231740529835, + "grad_norm": 2.962009959359631, + "learning_rate": 2.9062730020146847e-06, + "loss": 0.6315, + "step": 3387 + }, + { + "epoch": 0.13980358174465626, + "grad_norm": 8.13878159745523, + "learning_rate": 2.9062032350405413e-06, + "loss": 0.5305, + "step": 3388 + }, + { + "epoch": 0.1398448460840142, + "grad_norm": 34.998683296670656, + "learning_rate": 2.90613344294809e-06, + "loss": 0.5478, + "step": 3389 + }, + { + "epoch": 0.13988611042337212, + "grad_norm": 3.8239105098031434, + "learning_rate": 2.9060636257385777e-06, + "loss": 0.5786, + "step": 3390 + }, + { + "epoch": 0.13992737476273004, + "grad_norm": 3.056587557106007, + "learning_rate": 
2.905993783413252e-06, + "loss": 0.6174, + "step": 3391 + }, + { + "epoch": 0.13996863910208798, + "grad_norm": 3.7046629184748743, + "learning_rate": 2.90592391597336e-06, + "loss": 0.5983, + "step": 3392 + }, + { + "epoch": 0.1400099034414459, + "grad_norm": 3.0309971849114943, + "learning_rate": 2.9058540234201497e-06, + "loss": 0.5223, + "step": 3393 + }, + { + "epoch": 0.14005116778080384, + "grad_norm": 7.466324116928711, + "learning_rate": 2.905784105754869e-06, + "loss": 0.5697, + "step": 3394 + }, + { + "epoch": 0.14009243212016176, + "grad_norm": 5.936919927853751, + "learning_rate": 2.905714162978768e-06, + "loss": 0.5405, + "step": 3395 + }, + { + "epoch": 0.14013369645951967, + "grad_norm": 4.535896811918825, + "learning_rate": 2.905644195093096e-06, + "loss": 0.6181, + "step": 3396 + }, + { + "epoch": 0.14017496079887762, + "grad_norm": 3.310300299140054, + "learning_rate": 2.9055742020991016e-06, + "loss": 0.5535, + "step": 3397 + }, + { + "epoch": 0.14021622513823553, + "grad_norm": 2.2441661722747566, + "learning_rate": 2.905504183998036e-06, + "loss": 0.5107, + "step": 3398 + }, + { + "epoch": 0.14025748947759345, + "grad_norm": 2.1256372288086203, + "learning_rate": 2.90543414079115e-06, + "loss": 0.5542, + "step": 3399 + }, + { + "epoch": 0.1402987538169514, + "grad_norm": 3.7529805632692637, + "learning_rate": 2.9053640724796937e-06, + "loss": 0.5932, + "step": 3400 + }, + { + "epoch": 0.1403400181563093, + "grad_norm": 2.2097225317884908, + "learning_rate": 2.90529397906492e-06, + "loss": 0.5837, + "step": 3401 + }, + { + "epoch": 0.14038128249566725, + "grad_norm": 2.9390527423605346, + "learning_rate": 2.9052238605480793e-06, + "loss": 0.5567, + "step": 3402 + }, + { + "epoch": 0.14042254683502517, + "grad_norm": 11.928024082382432, + "learning_rate": 2.9051537169304263e-06, + "loss": 0.5786, + "step": 3403 + }, + { + "epoch": 0.14046381117438309, + "grad_norm": 5.0432099285140986, + "learning_rate": 2.905083548213212e-06, + "loss": 0.6252, + "step": 3404 + }, + { + "epoch": 0.14050507551374103, + "grad_norm": 10.39370029754283, + "learning_rate": 2.905013354397691e-06, + "loss": 0.5712, + "step": 3405 + }, + { + "epoch": 0.14054633985309894, + "grad_norm": 3.050522609571704, + "learning_rate": 2.9049431354851163e-06, + "loss": 0.5604, + "step": 3406 + }, + { + "epoch": 0.1405876041924569, + "grad_norm": 11.428261923589904, + "learning_rate": 2.904872891476743e-06, + "loss": 0.5962, + "step": 3407 + }, + { + "epoch": 0.1406288685318148, + "grad_norm": 3.6244558301298717, + "learning_rate": 2.9048026223738254e-06, + "loss": 0.5626, + "step": 3408 + }, + { + "epoch": 0.14067013287117272, + "grad_norm": 3.429880719148983, + "learning_rate": 2.904732328177618e-06, + "loss": 0.5585, + "step": 3409 + }, + { + "epoch": 0.14071139721053066, + "grad_norm": 7.590785966855928, + "learning_rate": 2.904662008889378e-06, + "loss": 0.6208, + "step": 3410 + }, + { + "epoch": 0.14075266154988858, + "grad_norm": 4.930298342588634, + "learning_rate": 2.90459166451036e-06, + "loss": 0.5515, + "step": 3411 + }, + { + "epoch": 0.14079392588924652, + "grad_norm": 4.6276711830956705, + "learning_rate": 2.904521295041821e-06, + "loss": 0.5249, + "step": 3412 + }, + { + "epoch": 0.14083519022860444, + "grad_norm": 2.953471359940074, + "learning_rate": 2.9044509004850188e-06, + "loss": 0.5442, + "step": 3413 + }, + { + "epoch": 0.14087645456796236, + "grad_norm": 4.773488584445968, + "learning_rate": 2.904380480841209e-06, + "loss": 0.6051, + "step": 3414 + }, + { + "epoch": 
0.1409177189073203, + "grad_norm": 3.785722526531296, + "learning_rate": 2.9043100361116515e-06, + "loss": 0.5707, + "step": 3415 + }, + { + "epoch": 0.14095898324667822, + "grad_norm": 4.138789030113667, + "learning_rate": 2.904239566297604e-06, + "loss": 0.5775, + "step": 3416 + }, + { + "epoch": 0.14100024758603616, + "grad_norm": 3.6052798365808623, + "learning_rate": 2.9041690714003245e-06, + "loss": 0.5577, + "step": 3417 + }, + { + "epoch": 0.14104151192539408, + "grad_norm": 4.154924291078735, + "learning_rate": 2.9040985514210726e-06, + "loss": 0.5449, + "step": 3418 + }, + { + "epoch": 0.141082776264752, + "grad_norm": 29.44761881412274, + "learning_rate": 2.904028006361108e-06, + "loss": 0.6105, + "step": 3419 + }, + { + "epoch": 0.14112404060410993, + "grad_norm": 4.624818807169676, + "learning_rate": 2.9039574362216912e-06, + "loss": 0.5145, + "step": 3420 + }, + { + "epoch": 0.14116530494346785, + "grad_norm": 2.0581754010913493, + "learning_rate": 2.9038868410040826e-06, + "loss": 0.527, + "step": 3421 + }, + { + "epoch": 0.1412065692828258, + "grad_norm": 3.974108606076944, + "learning_rate": 2.903816220709542e-06, + "loss": 0.5316, + "step": 3422 + }, + { + "epoch": 0.1412478336221837, + "grad_norm": 2.9415425580409496, + "learning_rate": 2.903745575339333e-06, + "loss": 0.5382, + "step": 3423 + }, + { + "epoch": 0.14128909796154163, + "grad_norm": 2.739139453002585, + "learning_rate": 2.9036749048947157e-06, + "loss": 0.569, + "step": 3424 + }, + { + "epoch": 0.14133036230089957, + "grad_norm": 2.5897936552027367, + "learning_rate": 2.9036042093769538e-06, + "loss": 0.571, + "step": 3425 + }, + { + "epoch": 0.1413716266402575, + "grad_norm": 2.9168664562975395, + "learning_rate": 2.903533488787309e-06, + "loss": 0.5545, + "step": 3426 + }, + { + "epoch": 0.14141289097961543, + "grad_norm": 3.2670586365731125, + "learning_rate": 2.9034627431270446e-06, + "loss": 0.65, + "step": 3427 + }, + { + "epoch": 0.14145415531897335, + "grad_norm": 3.965201717010056, + "learning_rate": 2.9033919723974254e-06, + "loss": 0.6807, + "step": 3428 + }, + { + "epoch": 0.14149541965833126, + "grad_norm": 4.09977654211697, + "learning_rate": 2.9033211765997145e-06, + "loss": 0.4829, + "step": 3429 + }, + { + "epoch": 0.1415366839976892, + "grad_norm": 2.8020503348463834, + "learning_rate": 2.9032503557351766e-06, + "loss": 0.5562, + "step": 3430 + }, + { + "epoch": 0.14157794833704712, + "grad_norm": 7.041388618896938, + "learning_rate": 2.9031795098050774e-06, + "loss": 0.5819, + "step": 3431 + }, + { + "epoch": 0.14161921267640504, + "grad_norm": 2.514690147312613, + "learning_rate": 2.9031086388106817e-06, + "loss": 0.556, + "step": 3432 + }, + { + "epoch": 0.14166047701576298, + "grad_norm": 2.1709181692417894, + "learning_rate": 2.9030377427532557e-06, + "loss": 0.5457, + "step": 3433 + }, + { + "epoch": 0.1417017413551209, + "grad_norm": 3.27461583425514, + "learning_rate": 2.9029668216340654e-06, + "loss": 0.5937, + "step": 3434 + }, + { + "epoch": 0.14174300569447884, + "grad_norm": 2.4245959436215125, + "learning_rate": 2.9028958754543783e-06, + "loss": 0.5253, + "step": 3435 + }, + { + "epoch": 0.14178427003383676, + "grad_norm": 2.9432711369449636, + "learning_rate": 2.9028249042154616e-06, + "loss": 0.5945, + "step": 3436 + }, + { + "epoch": 0.14182553437319467, + "grad_norm": 3.9780683123408873, + "learning_rate": 2.9027539079185824e-06, + "loss": 0.5481, + "step": 3437 + }, + { + "epoch": 0.14186679871255262, + "grad_norm": 5.529577425675643, + "learning_rate": 
2.9026828865650097e-06, + "loss": 0.5487, + "step": 3438 + }, + { + "epoch": 0.14190806305191053, + "grad_norm": 4.53891182162167, + "learning_rate": 2.9026118401560113e-06, + "loss": 0.5901, + "step": 3439 + }, + { + "epoch": 0.14194932739126848, + "grad_norm": 2.429588706713955, + "learning_rate": 2.9025407686928567e-06, + "loss": 0.5751, + "step": 3440 + }, + { + "epoch": 0.1419905917306264, + "grad_norm": 5.433932882738031, + "learning_rate": 2.9024696721768154e-06, + "loss": 0.5157, + "step": 3441 + }, + { + "epoch": 0.1420318560699843, + "grad_norm": 3.213925127278735, + "learning_rate": 2.9023985506091577e-06, + "loss": 0.5797, + "step": 3442 + }, + { + "epoch": 0.14207312040934225, + "grad_norm": 2.6888150644515707, + "learning_rate": 2.9023274039911528e-06, + "loss": 0.6172, + "step": 3443 + }, + { + "epoch": 0.14211438474870017, + "grad_norm": 2.8491244642254525, + "learning_rate": 2.9022562323240725e-06, + "loss": 0.5833, + "step": 3444 + }, + { + "epoch": 0.1421556490880581, + "grad_norm": 4.8367249370634715, + "learning_rate": 2.9021850356091886e-06, + "loss": 0.5169, + "step": 3445 + }, + { + "epoch": 0.14219691342741603, + "grad_norm": 2.734907690367311, + "learning_rate": 2.9021138138477717e-06, + "loss": 0.5719, + "step": 3446 + }, + { + "epoch": 0.14223817776677394, + "grad_norm": 3.0581983011097913, + "learning_rate": 2.902042567041095e-06, + "loss": 0.5349, + "step": 3447 + }, + { + "epoch": 0.1422794421061319, + "grad_norm": 8.451827612044633, + "learning_rate": 2.9019712951904304e-06, + "loss": 0.5761, + "step": 3448 + }, + { + "epoch": 0.1423207064454898, + "grad_norm": 2.5114186176338293, + "learning_rate": 2.901899998297051e-06, + "loss": 0.5411, + "step": 3449 + }, + { + "epoch": 0.14236197078484775, + "grad_norm": 3.433043033399463, + "learning_rate": 2.901828676362231e-06, + "loss": 0.6228, + "step": 3450 + }, + { + "epoch": 0.14240323512420566, + "grad_norm": 7.501691232698406, + "learning_rate": 2.9017573293872444e-06, + "loss": 0.5862, + "step": 3451 + }, + { + "epoch": 0.14244449946356358, + "grad_norm": 5.15838378419425, + "learning_rate": 2.901685957373365e-06, + "loss": 0.6238, + "step": 3452 + }, + { + "epoch": 0.14248576380292152, + "grad_norm": 3.7409618863620167, + "learning_rate": 2.901614560321867e-06, + "loss": 0.5778, + "step": 3453 + }, + { + "epoch": 0.14252702814227944, + "grad_norm": 3.6045436560637394, + "learning_rate": 2.9015431382340277e-06, + "loss": 0.5595, + "step": 3454 + }, + { + "epoch": 0.14256829248163738, + "grad_norm": 8.219143006489741, + "learning_rate": 2.9014716911111214e-06, + "loss": 0.592, + "step": 3455 + }, + { + "epoch": 0.1426095568209953, + "grad_norm": 5.235952054259343, + "learning_rate": 2.9014002189544245e-06, + "loss": 0.5149, + "step": 3456 + }, + { + "epoch": 0.14265082116035321, + "grad_norm": 2.1319865977636856, + "learning_rate": 2.9013287217652146e-06, + "loss": 0.5512, + "step": 3457 + }, + { + "epoch": 0.14269208549971116, + "grad_norm": 2.9202532227320552, + "learning_rate": 2.9012571995447675e-06, + "loss": 0.5629, + "step": 3458 + }, + { + "epoch": 0.14273334983906907, + "grad_norm": 2.506466156516768, + "learning_rate": 2.9011856522943616e-06, + "loss": 0.5767, + "step": 3459 + }, + { + "epoch": 0.142774614178427, + "grad_norm": 2.663078289355399, + "learning_rate": 2.901114080015275e-06, + "loss": 0.5391, + "step": 3460 + }, + { + "epoch": 0.14281587851778493, + "grad_norm": 20.002462191189693, + "learning_rate": 2.9010424827087853e-06, + "loss": 0.5668, + "step": 3461 + }, + { + "epoch": 
0.14285714285714285, + "grad_norm": 4.250614799053193, + "learning_rate": 2.9009708603761724e-06, + "loss": 0.529, + "step": 3462 + }, + { + "epoch": 0.1428984071965008, + "grad_norm": 1.8231122577977532, + "learning_rate": 2.9008992130187157e-06, + "loss": 0.5118, + "step": 3463 + }, + { + "epoch": 0.1429396715358587, + "grad_norm": 3.4430366151050555, + "learning_rate": 2.9008275406376936e-06, + "loss": 0.5909, + "step": 3464 + }, + { + "epoch": 0.14298093587521662, + "grad_norm": 4.192541036766385, + "learning_rate": 2.900755843234388e-06, + "loss": 0.559, + "step": 3465 + }, + { + "epoch": 0.14302220021457457, + "grad_norm": 3.905138751944511, + "learning_rate": 2.9006841208100786e-06, + "loss": 0.546, + "step": 3466 + }, + { + "epoch": 0.14306346455393248, + "grad_norm": 3.3858172136563773, + "learning_rate": 2.9006123733660467e-06, + "loss": 0.573, + "step": 3467 + }, + { + "epoch": 0.14310472889329043, + "grad_norm": 2.2511310673994207, + "learning_rate": 2.900540600903574e-06, + "loss": 0.4886, + "step": 3468 + }, + { + "epoch": 0.14314599323264834, + "grad_norm": 2.6526784665155523, + "learning_rate": 2.900468803423943e-06, + "loss": 0.5113, + "step": 3469 + }, + { + "epoch": 0.14318725757200626, + "grad_norm": 2.3463873803256727, + "learning_rate": 2.900396980928435e-06, + "loss": 0.5987, + "step": 3470 + }, + { + "epoch": 0.1432285219113642, + "grad_norm": 5.664740507959402, + "learning_rate": 2.9003251334183343e-06, + "loss": 0.5768, + "step": 3471 + }, + { + "epoch": 0.14326978625072212, + "grad_norm": 16.63109045156123, + "learning_rate": 2.9002532608949236e-06, + "loss": 0.589, + "step": 3472 + }, + { + "epoch": 0.14331105059008006, + "grad_norm": 4.410620782902641, + "learning_rate": 2.9001813633594868e-06, + "loss": 0.5604, + "step": 3473 + }, + { + "epoch": 0.14335231492943798, + "grad_norm": 3.2517099664858184, + "learning_rate": 2.9001094408133074e-06, + "loss": 0.581, + "step": 3474 + }, + { + "epoch": 0.1433935792687959, + "grad_norm": 5.184743072596302, + "learning_rate": 2.900037493257672e-06, + "loss": 0.559, + "step": 3475 + }, + { + "epoch": 0.14343484360815384, + "grad_norm": 3.7336926392339373, + "learning_rate": 2.8999655206938636e-06, + "loss": 0.53, + "step": 3476 + }, + { + "epoch": 0.14347610794751176, + "grad_norm": 2.8114679158512654, + "learning_rate": 2.8998935231231695e-06, + "loss": 0.5363, + "step": 3477 + }, + { + "epoch": 0.1435173722868697, + "grad_norm": 7.085413508546744, + "learning_rate": 2.8998215005468748e-06, + "loss": 0.5989, + "step": 3478 + }, + { + "epoch": 0.14355863662622761, + "grad_norm": 4.616909886894998, + "learning_rate": 2.8997494529662666e-06, + "loss": 0.5672, + "step": 3479 + }, + { + "epoch": 0.14359990096558553, + "grad_norm": 3.005081922459344, + "learning_rate": 2.8996773803826307e-06, + "loss": 0.5949, + "step": 3480 + }, + { + "epoch": 0.14364116530494347, + "grad_norm": 4.841458316124078, + "learning_rate": 2.8996052827972563e-06, + "loss": 0.5358, + "step": 3481 + }, + { + "epoch": 0.1436824296443014, + "grad_norm": 3.376715967430784, + "learning_rate": 2.8995331602114295e-06, + "loss": 0.4977, + "step": 3482 + }, + { + "epoch": 0.14372369398365933, + "grad_norm": 2.006125165036133, + "learning_rate": 2.8994610126264397e-06, + "loss": 0.4953, + "step": 3483 + }, + { + "epoch": 0.14376495832301725, + "grad_norm": 4.46767557452133, + "learning_rate": 2.8993888400435755e-06, + "loss": 0.5793, + "step": 3484 + }, + { + "epoch": 0.14380622266237517, + "grad_norm": 4.099935134530016, + "learning_rate": 
2.8993166424641253e-06, + "loss": 0.5327, + "step": 3485 + }, + { + "epoch": 0.1438474870017331, + "grad_norm": 4.311675642445506, + "learning_rate": 2.89924441988938e-06, + "loss": 0.5816, + "step": 3486 + }, + { + "epoch": 0.14388875134109103, + "grad_norm": 4.916904224069306, + "learning_rate": 2.899172172320628e-06, + "loss": 0.582, + "step": 3487 + }, + { + "epoch": 0.14393001568044897, + "grad_norm": 3.0928574088873315, + "learning_rate": 2.8990998997591622e-06, + "loss": 0.5553, + "step": 3488 + }, + { + "epoch": 0.14397128001980689, + "grad_norm": 5.580626342521701, + "learning_rate": 2.8990276022062712e-06, + "loss": 0.6124, + "step": 3489 + }, + { + "epoch": 0.1440125443591648, + "grad_norm": 3.149490597542664, + "learning_rate": 2.8989552796632477e-06, + "loss": 0.5925, + "step": 3490 + }, + { + "epoch": 0.14405380869852275, + "grad_norm": 3.370430318142831, + "learning_rate": 2.8988829321313835e-06, + "loss": 0.5186, + "step": 3491 + }, + { + "epoch": 0.14409507303788066, + "grad_norm": 6.680720099819024, + "learning_rate": 2.8988105596119703e-06, + "loss": 0.6031, + "step": 3492 + }, + { + "epoch": 0.14413633737723858, + "grad_norm": 3.83668167657685, + "learning_rate": 2.8987381621063014e-06, + "loss": 0.6056, + "step": 3493 + }, + { + "epoch": 0.14417760171659652, + "grad_norm": 4.912177709616699, + "learning_rate": 2.89866573961567e-06, + "loss": 0.592, + "step": 3494 + }, + { + "epoch": 0.14421886605595444, + "grad_norm": 5.12851897692191, + "learning_rate": 2.898593292141369e-06, + "loss": 0.5651, + "step": 3495 + }, + { + "epoch": 0.14426013039531238, + "grad_norm": 3.5197812752616495, + "learning_rate": 2.8985208196846933e-06, + "loss": 0.5852, + "step": 3496 + }, + { + "epoch": 0.1443013947346703, + "grad_norm": 3.1279254228221016, + "learning_rate": 2.898448322246937e-06, + "loss": 0.5305, + "step": 3497 + }, + { + "epoch": 0.1443426590740282, + "grad_norm": 10.616363603483142, + "learning_rate": 2.8983757998293957e-06, + "loss": 0.5275, + "step": 3498 + }, + { + "epoch": 0.14438392341338616, + "grad_norm": 2.678147083071614, + "learning_rate": 2.898303252433364e-06, + "loss": 0.5741, + "step": 3499 + }, + { + "epoch": 0.14442518775274407, + "grad_norm": 4.614717037766754, + "learning_rate": 2.8982306800601385e-06, + "loss": 0.5574, + "step": 3500 + }, + { + "epoch": 0.14446645209210202, + "grad_norm": 2.473848587622587, + "learning_rate": 2.898158082711015e-06, + "loss": 0.656, + "step": 3501 + }, + { + "epoch": 0.14450771643145993, + "grad_norm": 11.47144407002926, + "learning_rate": 2.898085460387291e-06, + "loss": 0.5768, + "step": 3502 + }, + { + "epoch": 0.14454898077081785, + "grad_norm": 15.726383993971288, + "learning_rate": 2.8980128130902626e-06, + "loss": 0.5391, + "step": 3503 + }, + { + "epoch": 0.1445902451101758, + "grad_norm": 4.030295900715232, + "learning_rate": 2.8979401408212282e-06, + "loss": 0.5352, + "step": 3504 + }, + { + "epoch": 0.1446315094495337, + "grad_norm": 2.261596985681299, + "learning_rate": 2.897867443581486e-06, + "loss": 0.5561, + "step": 3505 + }, + { + "epoch": 0.14467277378889165, + "grad_norm": 7.021048613911068, + "learning_rate": 2.8977947213723343e-06, + "loss": 0.5713, + "step": 3506 + }, + { + "epoch": 0.14471403812824957, + "grad_norm": 3.1486661816411985, + "learning_rate": 2.897721974195072e-06, + "loss": 0.5728, + "step": 3507 + }, + { + "epoch": 0.14475530246760748, + "grad_norm": 4.19961664724973, + "learning_rate": 2.897649202050999e-06, + "loss": 0.5932, + "step": 3508 + }, + { + "epoch": 0.14479656680696543, + 
"grad_norm": 4.41020604436619, + "learning_rate": 2.8975764049414144e-06, + "loss": 0.5941, + "step": 3509 + }, + { + "epoch": 0.14483783114632334, + "grad_norm": 4.654972614031197, + "learning_rate": 2.897503582867619e-06, + "loss": 0.559, + "step": 3510 + }, + { + "epoch": 0.1448790954856813, + "grad_norm": 12.697616721599022, + "learning_rate": 2.8974307358309137e-06, + "loss": 0.5862, + "step": 3511 + }, + { + "epoch": 0.1449203598250392, + "grad_norm": 2.3861615501732962, + "learning_rate": 2.8973578638325996e-06, + "loss": 0.584, + "step": 3512 + }, + { + "epoch": 0.14496162416439712, + "grad_norm": 3.8044493875794383, + "learning_rate": 2.897284966873979e-06, + "loss": 0.5428, + "step": 3513 + }, + { + "epoch": 0.14500288850375506, + "grad_norm": 4.197448255664402, + "learning_rate": 2.8972120449563523e-06, + "loss": 0.4828, + "step": 3514 + }, + { + "epoch": 0.14504415284311298, + "grad_norm": 5.557955112473657, + "learning_rate": 2.897139098081024e-06, + "loss": 0.5888, + "step": 3515 + }, + { + "epoch": 0.14508541718247092, + "grad_norm": 3.8607882286414212, + "learning_rate": 2.8970661262492965e-06, + "loss": 0.564, + "step": 3516 + }, + { + "epoch": 0.14512668152182884, + "grad_norm": 5.483576756759804, + "learning_rate": 2.8969931294624727e-06, + "loss": 0.5627, + "step": 3517 + }, + { + "epoch": 0.14516794586118675, + "grad_norm": 5.6062100392473475, + "learning_rate": 2.8969201077218562e-06, + "loss": 0.6304, + "step": 3518 + }, + { + "epoch": 0.1452092102005447, + "grad_norm": 4.762816197193611, + "learning_rate": 2.896847061028753e-06, + "loss": 0.5332, + "step": 3519 + }, + { + "epoch": 0.1452504745399026, + "grad_norm": 8.14338025181391, + "learning_rate": 2.8967739893844665e-06, + "loss": 0.571, + "step": 3520 + }, + { + "epoch": 0.14529173887926056, + "grad_norm": 3.4231604366222212, + "learning_rate": 2.8967008927903025e-06, + "loss": 0.5019, + "step": 3521 + }, + { + "epoch": 0.14533300321861847, + "grad_norm": 3.4627980695389926, + "learning_rate": 2.8966277712475665e-06, + "loss": 0.5579, + "step": 3522 + }, + { + "epoch": 0.1453742675579764, + "grad_norm": 7.2957941264948385, + "learning_rate": 2.8965546247575643e-06, + "loss": 0.6258, + "step": 3523 + }, + { + "epoch": 0.14541553189733433, + "grad_norm": 9.680528248956614, + "learning_rate": 2.8964814533216033e-06, + "loss": 0.564, + "step": 3524 + }, + { + "epoch": 0.14545679623669225, + "grad_norm": 2.7072591433003925, + "learning_rate": 2.8964082569409903e-06, + "loss": 0.5269, + "step": 3525 + }, + { + "epoch": 0.14549806057605016, + "grad_norm": 2.866037683020897, + "learning_rate": 2.8963350356170316e-06, + "loss": 0.5581, + "step": 3526 + }, + { + "epoch": 0.1455393249154081, + "grad_norm": 3.6566422865989363, + "learning_rate": 2.8962617893510375e-06, + "loss": 0.5456, + "step": 3527 + }, + { + "epoch": 0.14558058925476602, + "grad_norm": 2.5128635138897395, + "learning_rate": 2.8961885181443136e-06, + "loss": 0.5937, + "step": 3528 + }, + { + "epoch": 0.14562185359412397, + "grad_norm": 9.478215961731165, + "learning_rate": 2.896115221998171e-06, + "loss": 0.5657, + "step": 3529 + }, + { + "epoch": 0.14566311793348188, + "grad_norm": 4.154181907831201, + "learning_rate": 2.8960419009139175e-06, + "loss": 0.624, + "step": 3530 + }, + { + "epoch": 0.1457043822728398, + "grad_norm": 3.3402714511709446, + "learning_rate": 2.895968554892863e-06, + "loss": 0.5784, + "step": 3531 + }, + { + "epoch": 0.14574564661219774, + "grad_norm": 7.234287548254827, + "learning_rate": 2.8958951839363184e-06, + "loss": 
0.531, + "step": 3532 + }, + { + "epoch": 0.14578691095155566, + "grad_norm": 4.549700075956852, + "learning_rate": 2.895821788045594e-06, + "loss": 0.5556, + "step": 3533 + }, + { + "epoch": 0.1458281752909136, + "grad_norm": 3.038737304282141, + "learning_rate": 2.8957483672219996e-06, + "loss": 0.5908, + "step": 3534 + }, + { + "epoch": 0.14586943963027152, + "grad_norm": 7.727335907177432, + "learning_rate": 2.8956749214668486e-06, + "loss": 0.5277, + "step": 3535 + }, + { + "epoch": 0.14591070396962944, + "grad_norm": 2.4672130686889413, + "learning_rate": 2.895601450781452e-06, + "loss": 0.508, + "step": 3536 + }, + { + "epoch": 0.14595196830898738, + "grad_norm": 2.989554463205334, + "learning_rate": 2.895527955167122e-06, + "loss": 0.5674, + "step": 3537 + }, + { + "epoch": 0.1459932326483453, + "grad_norm": 2.4678429455409696, + "learning_rate": 2.8954544346251723e-06, + "loss": 0.5736, + "step": 3538 + }, + { + "epoch": 0.14603449698770324, + "grad_norm": 8.039045877465812, + "learning_rate": 2.895380889156915e-06, + "loss": 0.5834, + "step": 3539 + }, + { + "epoch": 0.14607576132706115, + "grad_norm": 4.0704805623840405, + "learning_rate": 2.895307318763664e-06, + "loss": 0.5648, + "step": 3540 + }, + { + "epoch": 0.14611702566641907, + "grad_norm": 4.632319845080038, + "learning_rate": 2.895233723446734e-06, + "loss": 0.5763, + "step": 3541 + }, + { + "epoch": 0.14615829000577701, + "grad_norm": 3.119859438487609, + "learning_rate": 2.89516010320744e-06, + "loss": 0.5856, + "step": 3542 + }, + { + "epoch": 0.14619955434513493, + "grad_norm": 2.9191524671042464, + "learning_rate": 2.8950864580470957e-06, + "loss": 0.5718, + "step": 3543 + }, + { + "epoch": 0.14624081868449287, + "grad_norm": 2.83898048378386, + "learning_rate": 2.8950127879670174e-06, + "loss": 0.559, + "step": 3544 + }, + { + "epoch": 0.1462820830238508, + "grad_norm": 2.556616435828697, + "learning_rate": 2.894939092968521e-06, + "loss": 0.5654, + "step": 3545 + }, + { + "epoch": 0.1463233473632087, + "grad_norm": 4.32300583540267, + "learning_rate": 2.8948653730529227e-06, + "loss": 0.5603, + "step": 3546 + }, + { + "epoch": 0.14636461170256665, + "grad_norm": 4.51260181077126, + "learning_rate": 2.8947916282215397e-06, + "loss": 0.6158, + "step": 3547 + }, + { + "epoch": 0.14640587604192457, + "grad_norm": 2.2880632422556784, + "learning_rate": 2.8947178584756884e-06, + "loss": 0.6037, + "step": 3548 + }, + { + "epoch": 0.1464471403812825, + "grad_norm": 8.524840123936322, + "learning_rate": 2.8946440638166876e-06, + "loss": 0.5943, + "step": 3549 + }, + { + "epoch": 0.14648840472064042, + "grad_norm": 5.1669946951439245, + "learning_rate": 2.8945702442458545e-06, + "loss": 0.5157, + "step": 3550 + }, + { + "epoch": 0.14652966905999834, + "grad_norm": 3.585016475125312, + "learning_rate": 2.894496399764509e-06, + "loss": 0.5431, + "step": 3551 + }, + { + "epoch": 0.14657093339935628, + "grad_norm": 12.807003085286752, + "learning_rate": 2.8944225303739684e-06, + "loss": 0.5813, + "step": 3552 + }, + { + "epoch": 0.1466121977387142, + "grad_norm": 3.1557152051922728, + "learning_rate": 2.894348636075554e-06, + "loss": 0.6029, + "step": 3553 + }, + { + "epoch": 0.14665346207807212, + "grad_norm": 6.71088551370698, + "learning_rate": 2.894274716870584e-06, + "loss": 0.578, + "step": 3554 + }, + { + "epoch": 0.14669472641743006, + "grad_norm": 3.857571688593563, + "learning_rate": 2.89420077276038e-06, + "loss": 0.6028, + "step": 3555 + }, + { + "epoch": 0.14673599075678798, + "grad_norm": 2.814902603116535, + 
"learning_rate": 2.894126803746262e-06, + "loss": 0.5733, + "step": 3556 + }, + { + "epoch": 0.14677725509614592, + "grad_norm": 16.248748377892227, + "learning_rate": 2.8940528098295525e-06, + "loss": 0.5053, + "step": 3557 + }, + { + "epoch": 0.14681851943550384, + "grad_norm": 3.907807395676536, + "learning_rate": 2.8939787910115717e-06, + "loss": 0.497, + "step": 3558 + }, + { + "epoch": 0.14685978377486175, + "grad_norm": 2.2170773310627157, + "learning_rate": 2.893904747293643e-06, + "loss": 0.579, + "step": 3559 + }, + { + "epoch": 0.1469010481142197, + "grad_norm": 3.297800489186449, + "learning_rate": 2.8938306786770883e-06, + "loss": 0.5152, + "step": 3560 + }, + { + "epoch": 0.1469423124535776, + "grad_norm": 2.9078958487329603, + "learning_rate": 2.893756585163231e-06, + "loss": 0.5893, + "step": 3561 + }, + { + "epoch": 0.14698357679293556, + "grad_norm": 2.6127970312594098, + "learning_rate": 2.893682466753394e-06, + "loss": 0.6326, + "step": 3562 + }, + { + "epoch": 0.14702484113229347, + "grad_norm": 3.265447211694918, + "learning_rate": 2.893608323448902e-06, + "loss": 0.5646, + "step": 3563 + }, + { + "epoch": 0.1470661054716514, + "grad_norm": 3.0702560738870956, + "learning_rate": 2.893534155251079e-06, + "loss": 0.5595, + "step": 3564 + }, + { + "epoch": 0.14710736981100933, + "grad_norm": 3.011361147820516, + "learning_rate": 2.8934599621612502e-06, + "loss": 0.5491, + "step": 3565 + }, + { + "epoch": 0.14714863415036725, + "grad_norm": 3.560390534155918, + "learning_rate": 2.89338574418074e-06, + "loss": 0.5535, + "step": 3566 + }, + { + "epoch": 0.1471898984897252, + "grad_norm": 27.71265710703692, + "learning_rate": 2.893311501310875e-06, + "loss": 0.5566, + "step": 3567 + }, + { + "epoch": 0.1472311628290831, + "grad_norm": 5.603466253019345, + "learning_rate": 2.893237233552981e-06, + "loss": 0.5751, + "step": 3568 + }, + { + "epoch": 0.14727242716844102, + "grad_norm": 32.17058191925821, + "learning_rate": 2.893162940908385e-06, + "loss": 0.5379, + "step": 3569 + }, + { + "epoch": 0.14731369150779897, + "grad_norm": 4.6946610613520345, + "learning_rate": 2.8930886233784135e-06, + "loss": 0.6195, + "step": 3570 + }, + { + "epoch": 0.14735495584715688, + "grad_norm": 2.3377523011548664, + "learning_rate": 2.8930142809643942e-06, + "loss": 0.5177, + "step": 3571 + }, + { + "epoch": 0.14739622018651483, + "grad_norm": 2.7950904215701775, + "learning_rate": 2.8929399136676548e-06, + "loss": 0.5972, + "step": 3572 + }, + { + "epoch": 0.14743748452587274, + "grad_norm": 2.568784315701662, + "learning_rate": 2.892865521489524e-06, + "loss": 0.6412, + "step": 3573 + }, + { + "epoch": 0.14747874886523066, + "grad_norm": 10.562375798362025, + "learning_rate": 2.8927911044313308e-06, + "loss": 0.5968, + "step": 3574 + }, + { + "epoch": 0.1475200132045886, + "grad_norm": 3.3056131171752137, + "learning_rate": 2.892716662494404e-06, + "loss": 0.5568, + "step": 3575 + }, + { + "epoch": 0.14756127754394652, + "grad_norm": 2.3217962983513907, + "learning_rate": 2.8926421956800738e-06, + "loss": 0.5486, + "step": 3576 + }, + { + "epoch": 0.14760254188330446, + "grad_norm": 3.0549766380238306, + "learning_rate": 2.8925677039896702e-06, + "loss": 0.5811, + "step": 3577 + }, + { + "epoch": 0.14764380622266238, + "grad_norm": 2.088221592484626, + "learning_rate": 2.8924931874245234e-06, + "loss": 0.5512, + "step": 3578 + }, + { + "epoch": 0.1476850705620203, + "grad_norm": 5.395393167485281, + "learning_rate": 2.8924186459859652e-06, + "loss": 0.5765, + "step": 3579 + }, + { + 
"epoch": 0.14772633490137824, + "grad_norm": 4.183833766714656, + "learning_rate": 2.8923440796753264e-06, + "loss": 0.5043, + "step": 3580 + }, + { + "epoch": 0.14776759924073615, + "grad_norm": 4.881831939558199, + "learning_rate": 2.8922694884939397e-06, + "loss": 0.5626, + "step": 3581 + }, + { + "epoch": 0.1478088635800941, + "grad_norm": 3.2774359717974786, + "learning_rate": 2.8921948724431364e-06, + "loss": 0.6508, + "step": 3582 + }, + { + "epoch": 0.147850127919452, + "grad_norm": 3.799707662095374, + "learning_rate": 2.89212023152425e-06, + "loss": 0.5911, + "step": 3583 + }, + { + "epoch": 0.14789139225880993, + "grad_norm": 4.537505983347937, + "learning_rate": 2.892045565738614e-06, + "loss": 0.5996, + "step": 3584 + }, + { + "epoch": 0.14793265659816787, + "grad_norm": 5.818779305912039, + "learning_rate": 2.8919708750875615e-06, + "loss": 0.5321, + "step": 3585 + }, + { + "epoch": 0.1479739209375258, + "grad_norm": 2.3953338217030016, + "learning_rate": 2.8918961595724273e-06, + "loss": 0.5513, + "step": 3586 + }, + { + "epoch": 0.1480151852768837, + "grad_norm": 4.863068492021125, + "learning_rate": 2.8918214191945456e-06, + "loss": 0.5812, + "step": 3587 + }, + { + "epoch": 0.14805644961624165, + "grad_norm": 2.641879475153756, + "learning_rate": 2.8917466539552515e-06, + "loss": 0.5451, + "step": 3588 + }, + { + "epoch": 0.14809771395559956, + "grad_norm": 2.54874559673725, + "learning_rate": 2.8916718638558805e-06, + "loss": 0.5949, + "step": 3589 + }, + { + "epoch": 0.1481389782949575, + "grad_norm": 12.514103924444424, + "learning_rate": 2.891597048897769e-06, + "loss": 0.6073, + "step": 3590 + }, + { + "epoch": 0.14818024263431542, + "grad_norm": 3.346146404401632, + "learning_rate": 2.891522209082252e-06, + "loss": 0.5599, + "step": 3591 + }, + { + "epoch": 0.14822150697367334, + "grad_norm": 2.7302706574588873, + "learning_rate": 2.8914473444106684e-06, + "loss": 0.5987, + "step": 3592 + }, + { + "epoch": 0.14826277131303128, + "grad_norm": 4.555869000959231, + "learning_rate": 2.891372454884354e-06, + "loss": 0.5975, + "step": 3593 + }, + { + "epoch": 0.1483040356523892, + "grad_norm": 2.8117263690255823, + "learning_rate": 2.8912975405046463e-06, + "loss": 0.5868, + "step": 3594 + }, + { + "epoch": 0.14834529999174714, + "grad_norm": 2.4108352038901257, + "learning_rate": 2.8912226012728845e-06, + "loss": 0.5611, + "step": 3595 + }, + { + "epoch": 0.14838656433110506, + "grad_norm": 2.237899185099206, + "learning_rate": 2.8911476371904067e-06, + "loss": 0.5594, + "step": 3596 + }, + { + "epoch": 0.14842782867046297, + "grad_norm": 2.644727253330112, + "learning_rate": 2.891072648258552e-06, + "loss": 0.5892, + "step": 3597 + }, + { + "epoch": 0.14846909300982092, + "grad_norm": 3.4269875314104388, + "learning_rate": 2.89099763447866e-06, + "loss": 0.5553, + "step": 3598 + }, + { + "epoch": 0.14851035734917883, + "grad_norm": 3.210080017221337, + "learning_rate": 2.8909225958520703e-06, + "loss": 0.5517, + "step": 3599 + }, + { + "epoch": 0.14855162168853678, + "grad_norm": 2.750803337858622, + "learning_rate": 2.890847532380124e-06, + "loss": 0.5374, + "step": 3600 + }, + { + "epoch": 0.1485928860278947, + "grad_norm": 3.4183973639572987, + "learning_rate": 2.89077244406416e-06, + "loss": 0.5769, + "step": 3601 + }, + { + "epoch": 0.1486341503672526, + "grad_norm": 3.3624558173621346, + "learning_rate": 2.8906973309055227e-06, + "loss": 0.5935, + "step": 3602 + }, + { + "epoch": 0.14867541470661055, + "grad_norm": 2.3525929391730687, + "learning_rate": 
2.890622192905551e-06, + "loss": 0.5195, + "step": 3603 + }, + { + "epoch": 0.14871667904596847, + "grad_norm": 3.3554906566997653, + "learning_rate": 2.8905470300655885e-06, + "loss": 0.5571, + "step": 3604 + }, + { + "epoch": 0.1487579433853264, + "grad_norm": 2.2675765582719616, + "learning_rate": 2.8904718423869774e-06, + "loss": 0.5942, + "step": 3605 + }, + { + "epoch": 0.14879920772468433, + "grad_norm": 3.0196133587718816, + "learning_rate": 2.890396629871061e-06, + "loss": 0.5536, + "step": 3606 + }, + { + "epoch": 0.14884047206404225, + "grad_norm": 4.64884653734121, + "learning_rate": 2.890321392519182e-06, + "loss": 0.5519, + "step": 3607 + }, + { + "epoch": 0.1488817364034002, + "grad_norm": 4.519420294702108, + "learning_rate": 2.8902461303326855e-06, + "loss": 0.5637, + "step": 3608 + }, + { + "epoch": 0.1489230007427581, + "grad_norm": 4.713928292720171, + "learning_rate": 2.8901708433129144e-06, + "loss": 0.5776, + "step": 3609 + }, + { + "epoch": 0.14896426508211605, + "grad_norm": 7.193823833400869, + "learning_rate": 2.890095531461215e-06, + "loss": 0.5211, + "step": 3610 + }, + { + "epoch": 0.14900552942147396, + "grad_norm": 2.6308938578325027, + "learning_rate": 2.890020194778932e-06, + "loss": 0.5947, + "step": 3611 + }, + { + "epoch": 0.14904679376083188, + "grad_norm": 2.501380159455988, + "learning_rate": 2.889944833267411e-06, + "loss": 0.6074, + "step": 3612 + }, + { + "epoch": 0.14908805810018982, + "grad_norm": 3.88424525031536, + "learning_rate": 2.889869446927998e-06, + "loss": 0.5965, + "step": 3613 + }, + { + "epoch": 0.14912932243954774, + "grad_norm": 2.566779734645595, + "learning_rate": 2.8897940357620395e-06, + "loss": 0.554, + "step": 3614 + }, + { + "epoch": 0.14917058677890566, + "grad_norm": 2.826843870170135, + "learning_rate": 2.889718599770883e-06, + "loss": 0.5488, + "step": 3615 + }, + { + "epoch": 0.1492118511182636, + "grad_norm": 3.9856471720257884, + "learning_rate": 2.8896431389558757e-06, + "loss": 0.5791, + "step": 3616 + }, + { + "epoch": 0.14925311545762152, + "grad_norm": 4.076393855494112, + "learning_rate": 2.8895676533183655e-06, + "loss": 0.5344, + "step": 3617 + }, + { + "epoch": 0.14929437979697946, + "grad_norm": 2.8467144091715393, + "learning_rate": 2.8894921428597013e-06, + "loss": 0.6188, + "step": 3618 + }, + { + "epoch": 0.14933564413633738, + "grad_norm": 2.706639567873188, + "learning_rate": 2.889416607581231e-06, + "loss": 0.5435, + "step": 3619 + }, + { + "epoch": 0.1493769084756953, + "grad_norm": 3.1050263821339255, + "learning_rate": 2.8893410474843046e-06, + "loss": 0.5474, + "step": 3620 + }, + { + "epoch": 0.14941817281505324, + "grad_norm": 2.8820322930883555, + "learning_rate": 2.8892654625702716e-06, + "loss": 0.4938, + "step": 3621 + }, + { + "epoch": 0.14945943715441115, + "grad_norm": 6.276265802734426, + "learning_rate": 2.889189852840482e-06, + "loss": 0.5329, + "step": 3622 + }, + { + "epoch": 0.1495007014937691, + "grad_norm": 3.391725550953455, + "learning_rate": 2.889114218296286e-06, + "loss": 0.5783, + "step": 3623 + }, + { + "epoch": 0.149541965833127, + "grad_norm": 2.8595140463856272, + "learning_rate": 2.889038558939035e-06, + "loss": 0.6659, + "step": 3624 + }, + { + "epoch": 0.14958323017248493, + "grad_norm": 3.967142286832013, + "learning_rate": 2.888962874770081e-06, + "loss": 0.5761, + "step": 3625 + }, + { + "epoch": 0.14962449451184287, + "grad_norm": 2.674844145687808, + "learning_rate": 2.888887165790775e-06, + "loss": 0.5422, + "step": 3626 + }, + { + "epoch": 0.1496657588512008, 
+ "grad_norm": 7.223614472577255, + "learning_rate": 2.8888114320024697e-06, + "loss": 0.5558, + "step": 3627 + }, + { + "epoch": 0.14970702319055873, + "grad_norm": 4.454474581330693, + "learning_rate": 2.8887356734065183e-06, + "loss": 0.5484, + "step": 3628 + }, + { + "epoch": 0.14974828752991665, + "grad_norm": 8.458604667270682, + "learning_rate": 2.8886598900042737e-06, + "loss": 0.5819, + "step": 3629 + }, + { + "epoch": 0.14978955186927456, + "grad_norm": 3.642597967286006, + "learning_rate": 2.8885840817970893e-06, + "loss": 0.5887, + "step": 3630 + }, + { + "epoch": 0.1498308162086325, + "grad_norm": 55.317074562437334, + "learning_rate": 2.8885082487863196e-06, + "loss": 0.5934, + "step": 3631 + }, + { + "epoch": 0.14987208054799042, + "grad_norm": 3.5750817301796927, + "learning_rate": 2.888432390973319e-06, + "loss": 0.5531, + "step": 3632 + }, + { + "epoch": 0.14991334488734837, + "grad_norm": 3.4209753413873765, + "learning_rate": 2.8883565083594426e-06, + "loss": 0.5242, + "step": 3633 + }, + { + "epoch": 0.14995460922670628, + "grad_norm": 4.927379989630723, + "learning_rate": 2.888280600946046e-06, + "loss": 0.5502, + "step": 3634 + }, + { + "epoch": 0.1499958735660642, + "grad_norm": 2.7691137572269353, + "learning_rate": 2.888204668734485e-06, + "loss": 0.5634, + "step": 3635 + }, + { + "epoch": 0.15003713790542214, + "grad_norm": 3.236882727784527, + "learning_rate": 2.888128711726115e-06, + "loss": 0.5792, + "step": 3636 + }, + { + "epoch": 0.15007840224478006, + "grad_norm": 3.886827088511389, + "learning_rate": 2.8880527299222942e-06, + "loss": 0.585, + "step": 3637 + }, + { + "epoch": 0.150119666584138, + "grad_norm": 3.12537824906755, + "learning_rate": 2.8879767233243794e-06, + "loss": 0.5495, + "step": 3638 + }, + { + "epoch": 0.15016093092349592, + "grad_norm": 5.037443360690843, + "learning_rate": 2.887900691933728e-06, + "loss": 0.5896, + "step": 3639 + }, + { + "epoch": 0.15020219526285383, + "grad_norm": 5.030520031680283, + "learning_rate": 2.8878246357516983e-06, + "loss": 0.5369, + "step": 3640 + }, + { + "epoch": 0.15024345960221178, + "grad_norm": 3.054252658885237, + "learning_rate": 2.887748554779649e-06, + "loss": 0.5522, + "step": 3641 + }, + { + "epoch": 0.1502847239415697, + "grad_norm": 3.5263442831354035, + "learning_rate": 2.8876724490189386e-06, + "loss": 0.5623, + "step": 3642 + }, + { + "epoch": 0.15032598828092764, + "grad_norm": 1.921941475611062, + "learning_rate": 2.887596318470927e-06, + "loss": 0.5749, + "step": 3643 + }, + { + "epoch": 0.15036725262028555, + "grad_norm": 2.1697193809867263, + "learning_rate": 2.887520163136974e-06, + "loss": 0.5253, + "step": 3644 + }, + { + "epoch": 0.15040851695964347, + "grad_norm": 4.648689870490981, + "learning_rate": 2.88744398301844e-06, + "loss": 0.5554, + "step": 3645 + }, + { + "epoch": 0.1504497812990014, + "grad_norm": 8.35745764466937, + "learning_rate": 2.887367778116685e-06, + "loss": 0.5597, + "step": 3646 + }, + { + "epoch": 0.15049104563835933, + "grad_norm": 9.776192586528175, + "learning_rate": 2.887291548433071e-06, + "loss": 0.5671, + "step": 3647 + }, + { + "epoch": 0.15053230997771724, + "grad_norm": 3.236008823091695, + "learning_rate": 2.88721529396896e-06, + "loss": 0.5422, + "step": 3648 + }, + { + "epoch": 0.1505735743170752, + "grad_norm": 8.66858226720706, + "learning_rate": 2.8871390147257134e-06, + "loss": 0.5891, + "step": 3649 + }, + { + "epoch": 0.1506148386564331, + "grad_norm": 2.711537136217907, + "learning_rate": 2.887062710704693e-06, + "loss": 0.5789, + 
"step": 3650 + }, + { + "epoch": 0.15065610299579105, + "grad_norm": 3.610995872794601, + "learning_rate": 2.886986381907264e-06, + "loss": 0.6298, + "step": 3651 + }, + { + "epoch": 0.15069736733514896, + "grad_norm": 2.599764290489557, + "learning_rate": 2.886910028334788e-06, + "loss": 0.5674, + "step": 3652 + }, + { + "epoch": 0.15073863167450688, + "grad_norm": 3.469685753827392, + "learning_rate": 2.886833649988629e-06, + "loss": 0.5273, + "step": 3653 + }, + { + "epoch": 0.15077989601386482, + "grad_norm": 3.436138378519911, + "learning_rate": 2.886757246870152e-06, + "loss": 0.5731, + "step": 3654 + }, + { + "epoch": 0.15082116035322274, + "grad_norm": 3.721192389965138, + "learning_rate": 2.886680818980721e-06, + "loss": 0.5713, + "step": 3655 + }, + { + "epoch": 0.15086242469258068, + "grad_norm": 2.902147111271003, + "learning_rate": 2.8866043663217022e-06, + "loss": 0.571, + "step": 3656 + }, + { + "epoch": 0.1509036890319386, + "grad_norm": 2.157163930829488, + "learning_rate": 2.88652788889446e-06, + "loss": 0.5363, + "step": 3657 + }, + { + "epoch": 0.15094495337129651, + "grad_norm": 2.1690648126913463, + "learning_rate": 2.8864513867003623e-06, + "loss": 0.6011, + "step": 3658 + }, + { + "epoch": 0.15098621771065446, + "grad_norm": 2.6535368964434713, + "learning_rate": 2.8863748597407735e-06, + "loss": 0.5639, + "step": 3659 + }, + { + "epoch": 0.15102748205001237, + "grad_norm": 2.8114004334713942, + "learning_rate": 2.8862983080170622e-06, + "loss": 0.6126, + "step": 3660 + }, + { + "epoch": 0.15106874638937032, + "grad_norm": 2.9273560249290864, + "learning_rate": 2.8862217315305943e-06, + "loss": 0.5845, + "step": 3661 + }, + { + "epoch": 0.15111001072872823, + "grad_norm": 3.1399150651803747, + "learning_rate": 2.8861451302827394e-06, + "loss": 0.5766, + "step": 3662 + }, + { + "epoch": 0.15115127506808615, + "grad_norm": 2.8874349058908635, + "learning_rate": 2.886068504274864e-06, + "loss": 0.5684, + "step": 3663 + }, + { + "epoch": 0.1511925394074441, + "grad_norm": 3.371173848870582, + "learning_rate": 2.885991853508338e-06, + "loss": 0.5624, + "step": 3664 + }, + { + "epoch": 0.151233803746802, + "grad_norm": 5.248717449397986, + "learning_rate": 2.8859151779845306e-06, + "loss": 0.5825, + "step": 3665 + }, + { + "epoch": 0.15127506808615995, + "grad_norm": 4.635344612187382, + "learning_rate": 2.885838477704811e-06, + "loss": 0.5568, + "step": 3666 + }, + { + "epoch": 0.15131633242551787, + "grad_norm": 2.7799037633978863, + "learning_rate": 2.885761752670549e-06, + "loss": 0.5773, + "step": 3667 + }, + { + "epoch": 0.15135759676487578, + "grad_norm": 5.813234235984902, + "learning_rate": 2.8856850028831155e-06, + "loss": 0.6013, + "step": 3668 + }, + { + "epoch": 0.15139886110423373, + "grad_norm": 3.3696133911778494, + "learning_rate": 2.8856082283438816e-06, + "loss": 0.5323, + "step": 3669 + }, + { + "epoch": 0.15144012544359164, + "grad_norm": 2.8843960955333023, + "learning_rate": 2.8855314290542187e-06, + "loss": 0.5698, + "step": 3670 + }, + { + "epoch": 0.1514813897829496, + "grad_norm": 3.189988032899315, + "learning_rate": 2.885454605015498e-06, + "loss": 0.6087, + "step": 3671 + }, + { + "epoch": 0.1515226541223075, + "grad_norm": 2.5209586608296854, + "learning_rate": 2.8853777562290925e-06, + "loss": 0.5932, + "step": 3672 + }, + { + "epoch": 0.15156391846166542, + "grad_norm": 3.4697424505300827, + "learning_rate": 2.8853008826963743e-06, + "loss": 0.5552, + "step": 3673 + }, + { + "epoch": 0.15160518280102336, + "grad_norm": 7.35445663254161, + 
"learning_rate": 2.8852239844187173e-06, + "loss": 0.602, + "step": 3674 + }, + { + "epoch": 0.15164644714038128, + "grad_norm": 2.936192326516175, + "learning_rate": 2.885147061397494e-06, + "loss": 0.5771, + "step": 3675 + }, + { + "epoch": 0.1516877114797392, + "grad_norm": 2.9167813810705563, + "learning_rate": 2.885070113634079e-06, + "loss": 0.5975, + "step": 3676 + }, + { + "epoch": 0.15172897581909714, + "grad_norm": 2.868451021877876, + "learning_rate": 2.8849931411298475e-06, + "loss": 0.6214, + "step": 3677 + }, + { + "epoch": 0.15177024015845506, + "grad_norm": 4.224382625406963, + "learning_rate": 2.884916143886174e-06, + "loss": 0.5605, + "step": 3678 + }, + { + "epoch": 0.151811504497813, + "grad_norm": 2.330202109558185, + "learning_rate": 2.884839121904433e-06, + "loss": 0.5832, + "step": 3679 + }, + { + "epoch": 0.15185276883717092, + "grad_norm": 3.0374412321174318, + "learning_rate": 2.884762075186001e-06, + "loss": 0.5997, + "step": 3680 + }, + { + "epoch": 0.15189403317652883, + "grad_norm": 3.549019155372298, + "learning_rate": 2.884685003732254e-06, + "loss": 0.5659, + "step": 3681 + }, + { + "epoch": 0.15193529751588677, + "grad_norm": 3.7200751642333434, + "learning_rate": 2.884607907544569e-06, + "loss": 0.5646, + "step": 3682 + }, + { + "epoch": 0.1519765618552447, + "grad_norm": 2.843859408707406, + "learning_rate": 2.884530786624323e-06, + "loss": 0.5476, + "step": 3683 + }, + { + "epoch": 0.15201782619460263, + "grad_norm": 3.730622784676267, + "learning_rate": 2.8844536409728945e-06, + "loss": 0.5824, + "step": 3684 + }, + { + "epoch": 0.15205909053396055, + "grad_norm": 5.79885309948656, + "learning_rate": 2.8843764705916596e-06, + "loss": 0.5718, + "step": 3685 + }, + { + "epoch": 0.15210035487331847, + "grad_norm": 2.1002757248153103, + "learning_rate": 2.8842992754819983e-06, + "loss": 0.5632, + "step": 3686 + }, + { + "epoch": 0.1521416192126764, + "grad_norm": 18.62754531655708, + "learning_rate": 2.8842220556452887e-06, + "loss": 0.5052, + "step": 3687 + }, + { + "epoch": 0.15218288355203433, + "grad_norm": 4.477169379410492, + "learning_rate": 2.88414481108291e-06, + "loss": 0.54, + "step": 3688 + }, + { + "epoch": 0.15222414789139227, + "grad_norm": 2.6694974020764604, + "learning_rate": 2.8840675417962427e-06, + "loss": 0.5688, + "step": 3689 + }, + { + "epoch": 0.15226541223075019, + "grad_norm": 3.608883755531881, + "learning_rate": 2.883990247786667e-06, + "loss": 0.6714, + "step": 3690 + }, + { + "epoch": 0.1523066765701081, + "grad_norm": 9.729782955863282, + "learning_rate": 2.883912929055563e-06, + "loss": 0.6208, + "step": 3691 + }, + { + "epoch": 0.15234794090946605, + "grad_norm": 2.3461123661338226, + "learning_rate": 2.8838355856043123e-06, + "loss": 0.5787, + "step": 3692 + }, + { + "epoch": 0.15238920524882396, + "grad_norm": 4.693021233831294, + "learning_rate": 2.8837582174342956e-06, + "loss": 0.6008, + "step": 3693 + }, + { + "epoch": 0.1524304695881819, + "grad_norm": 2.817707197228011, + "learning_rate": 2.883680824546896e-06, + "loss": 0.5955, + "step": 3694 + }, + { + "epoch": 0.15247173392753982, + "grad_norm": 3.240900408919326, + "learning_rate": 2.8836034069434957e-06, + "loss": 0.5329, + "step": 3695 + }, + { + "epoch": 0.15251299826689774, + "grad_norm": 2.996956055218352, + "learning_rate": 2.8835259646254764e-06, + "loss": 0.6046, + "step": 3696 + }, + { + "epoch": 0.15255426260625568, + "grad_norm": 4.518146337189105, + "learning_rate": 2.883448497594223e-06, + "loss": 0.5699, + "step": 3697 + }, + { + "epoch": 
0.1525955269456136, + "grad_norm": 2.6702693741102097, + "learning_rate": 2.8833710058511186e-06, + "loss": 0.5719, + "step": 3698 + }, + { + "epoch": 0.15263679128497154, + "grad_norm": 2.824371403853697, + "learning_rate": 2.8832934893975473e-06, + "loss": 0.5726, + "step": 3699 + }, + { + "epoch": 0.15267805562432946, + "grad_norm": 5.418025802025248, + "learning_rate": 2.8832159482348937e-06, + "loss": 0.5971, + "step": 3700 + }, + { + "epoch": 0.15271931996368737, + "grad_norm": 11.601142285923403, + "learning_rate": 2.883138382364543e-06, + "loss": 0.5517, + "step": 3701 + }, + { + "epoch": 0.15276058430304532, + "grad_norm": 3.0889319427151074, + "learning_rate": 2.8830607917878806e-06, + "loss": 0.5892, + "step": 3702 + }, + { + "epoch": 0.15280184864240323, + "grad_norm": 7.00727357286245, + "learning_rate": 2.882983176506293e-06, + "loss": 0.6031, + "step": 3703 + }, + { + "epoch": 0.15284311298176118, + "grad_norm": 2.5652552744111317, + "learning_rate": 2.8829055365211655e-06, + "loss": 0.5398, + "step": 3704 + }, + { + "epoch": 0.1528843773211191, + "grad_norm": 7.479524098300827, + "learning_rate": 2.882827871833886e-06, + "loss": 0.593, + "step": 3705 + }, + { + "epoch": 0.152925641660477, + "grad_norm": 3.329611875296925, + "learning_rate": 2.882750182445841e-06, + "loss": 0.5633, + "step": 3706 + }, + { + "epoch": 0.15296690599983495, + "grad_norm": 3.4216647376751754, + "learning_rate": 2.882672468358419e-06, + "loss": 0.5775, + "step": 3707 + }, + { + "epoch": 0.15300817033919287, + "grad_norm": 2.520253253607398, + "learning_rate": 2.882594729573008e-06, + "loss": 0.5771, + "step": 3708 + }, + { + "epoch": 0.15304943467855078, + "grad_norm": 2.3532433770707186, + "learning_rate": 2.8825169660909966e-06, + "loss": 0.5583, + "step": 3709 + }, + { + "epoch": 0.15309069901790873, + "grad_norm": 5.304629068764891, + "learning_rate": 2.882439177913773e-06, + "loss": 0.5747, + "step": 3710 + }, + { + "epoch": 0.15313196335726664, + "grad_norm": 5.032539059235395, + "learning_rate": 2.882361365042728e-06, + "loss": 0.5922, + "step": 3711 + }, + { + "epoch": 0.1531732276966246, + "grad_norm": 11.817965421554726, + "learning_rate": 2.8822835274792504e-06, + "loss": 0.5456, + "step": 3712 + }, + { + "epoch": 0.1532144920359825, + "grad_norm": 4.0029031650717295, + "learning_rate": 2.882205665224731e-06, + "loss": 0.5577, + "step": 3713 + }, + { + "epoch": 0.15325575637534042, + "grad_norm": 4.5546988202880225, + "learning_rate": 2.8821277782805607e-06, + "loss": 0.6215, + "step": 3714 + }, + { + "epoch": 0.15329702071469836, + "grad_norm": 2.249640047492466, + "learning_rate": 2.882049866648131e-06, + "loss": 0.552, + "step": 3715 + }, + { + "epoch": 0.15333828505405628, + "grad_norm": 3.3665979972605165, + "learning_rate": 2.881971930328833e-06, + "loss": 0.5587, + "step": 3716 + }, + { + "epoch": 0.15337954939341422, + "grad_norm": 2.527318162346494, + "learning_rate": 2.881893969324059e-06, + "loss": 0.5456, + "step": 3717 + }, + { + "epoch": 0.15342081373277214, + "grad_norm": 3.882019294010855, + "learning_rate": 2.8818159836352024e-06, + "loss": 0.5847, + "step": 3718 + }, + { + "epoch": 0.15346207807213005, + "grad_norm": 3.0908879773810893, + "learning_rate": 2.8817379732636554e-06, + "loss": 0.5774, + "step": 3719 + }, + { + "epoch": 0.153503342411488, + "grad_norm": 3.423078527560021, + "learning_rate": 2.8816599382108113e-06, + "loss": 0.5329, + "step": 3720 + }, + { + "epoch": 0.1535446067508459, + "grad_norm": 19.761669932776538, + "learning_rate": 
2.8815818784780643e-06, + "loss": 0.5478, + "step": 3721 + }, + { + "epoch": 0.15358587109020386, + "grad_norm": 8.139813610850409, + "learning_rate": 2.881503794066809e-06, + "loss": 0.5984, + "step": 3722 + }, + { + "epoch": 0.15362713542956177, + "grad_norm": 3.139996696275531, + "learning_rate": 2.88142568497844e-06, + "loss": 0.5695, + "step": 3723 + }, + { + "epoch": 0.1536683997689197, + "grad_norm": 3.2256248962275973, + "learning_rate": 2.8813475512143523e-06, + "loss": 0.5904, + "step": 3724 + }, + { + "epoch": 0.15370966410827763, + "grad_norm": 2.4603920547477296, + "learning_rate": 2.881269392775942e-06, + "loss": 0.6336, + "step": 3725 + }, + { + "epoch": 0.15375092844763555, + "grad_norm": 4.416034335535648, + "learning_rate": 2.8811912096646043e-06, + "loss": 0.5403, + "step": 3726 + }, + { + "epoch": 0.1537921927869935, + "grad_norm": 3.2984349534518325, + "learning_rate": 2.881113001881737e-06, + "loss": 0.5663, + "step": 3727 + }, + { + "epoch": 0.1538334571263514, + "grad_norm": 2.880330724471708, + "learning_rate": 2.881034769428736e-06, + "loss": 0.5218, + "step": 3728 + }, + { + "epoch": 0.15387472146570932, + "grad_norm": 3.6155767425732517, + "learning_rate": 2.8809565123069997e-06, + "loss": 0.5739, + "step": 3729 + }, + { + "epoch": 0.15391598580506727, + "grad_norm": 2.3988954955585644, + "learning_rate": 2.8808782305179253e-06, + "loss": 0.588, + "step": 3730 + }, + { + "epoch": 0.15395725014442518, + "grad_norm": 9.039079452413109, + "learning_rate": 2.8807999240629114e-06, + "loss": 0.5642, + "step": 3731 + }, + { + "epoch": 0.15399851448378313, + "grad_norm": 3.5804829278517545, + "learning_rate": 2.880721592943356e-06, + "loss": 0.5627, + "step": 3732 + }, + { + "epoch": 0.15403977882314104, + "grad_norm": 2.8598206630267526, + "learning_rate": 2.8806432371606597e-06, + "loss": 0.5633, + "step": 3733 + }, + { + "epoch": 0.15408104316249896, + "grad_norm": 4.794815869882796, + "learning_rate": 2.880564856716221e-06, + "loss": 0.5259, + "step": 3734 + }, + { + "epoch": 0.1541223075018569, + "grad_norm": 3.0979711660281986, + "learning_rate": 2.8804864516114403e-06, + "loss": 0.5759, + "step": 3735 + }, + { + "epoch": 0.15416357184121482, + "grad_norm": 3.5166868648163816, + "learning_rate": 2.880408021847718e-06, + "loss": 0.551, + "step": 3736 + }, + { + "epoch": 0.15420483618057274, + "grad_norm": 2.4954006650108367, + "learning_rate": 2.8803295674264552e-06, + "loss": 0.535, + "step": 3737 + }, + { + "epoch": 0.15424610051993068, + "grad_norm": 2.6972971737118248, + "learning_rate": 2.8802510883490536e-06, + "loss": 0.6055, + "step": 3738 + }, + { + "epoch": 0.1542873648592886, + "grad_norm": 2.300020626523373, + "learning_rate": 2.880172584616914e-06, + "loss": 0.5442, + "step": 3739 + }, + { + "epoch": 0.15432862919864654, + "grad_norm": 6.885953809971298, + "learning_rate": 2.88009405623144e-06, + "loss": 0.5508, + "step": 3740 + }, + { + "epoch": 0.15436989353800445, + "grad_norm": 2.804091852922098, + "learning_rate": 2.880015503194034e-06, + "loss": 0.5806, + "step": 3741 + }, + { + "epoch": 0.15441115787736237, + "grad_norm": 5.664227174610145, + "learning_rate": 2.879936925506098e-06, + "loss": 0.549, + "step": 3742 + }, + { + "epoch": 0.15445242221672031, + "grad_norm": 3.312252285082458, + "learning_rate": 2.879858323169037e-06, + "loss": 0.5913, + "step": 3743 + }, + { + "epoch": 0.15449368655607823, + "grad_norm": 2.4794788948725768, + "learning_rate": 2.879779696184254e-06, + "loss": 0.5297, + "step": 3744 + }, + { + "epoch": 
0.15453495089543617, + "grad_norm": 3.3055545507149993, + "learning_rate": 2.8797010445531546e-06, + "loss": 0.5113, + "step": 3745 + }, + { + "epoch": 0.1545762152347941, + "grad_norm": 3.2095076366363076, + "learning_rate": 2.8796223682771426e-06, + "loss": 0.5372, + "step": 3746 + }, + { + "epoch": 0.154617479574152, + "grad_norm": 3.178140514476711, + "learning_rate": 2.879543667357624e-06, + "loss": 0.4812, + "step": 3747 + }, + { + "epoch": 0.15465874391350995, + "grad_norm": 3.5990560159718905, + "learning_rate": 2.879464941796004e-06, + "loss": 0.5673, + "step": 3748 + }, + { + "epoch": 0.15470000825286787, + "grad_norm": 5.038376088355263, + "learning_rate": 2.8793861915936895e-06, + "loss": 0.5832, + "step": 3749 + }, + { + "epoch": 0.1547412725922258, + "grad_norm": 2.8884616104686973, + "learning_rate": 2.8793074167520872e-06, + "loss": 0.6138, + "step": 3750 + }, + { + "epoch": 0.15478253693158373, + "grad_norm": 9.16554642774223, + "learning_rate": 2.879228617272604e-06, + "loss": 0.6122, + "step": 3751 + }, + { + "epoch": 0.15482380127094164, + "grad_norm": 3.5744989967386593, + "learning_rate": 2.879149793156647e-06, + "loss": 0.4789, + "step": 3752 + }, + { + "epoch": 0.15486506561029958, + "grad_norm": 3.1877307891849873, + "learning_rate": 2.8790709444056248e-06, + "loss": 0.5653, + "step": 3753 + }, + { + "epoch": 0.1549063299496575, + "grad_norm": 1.9743658632015653, + "learning_rate": 2.8789920710209455e-06, + "loss": 0.5726, + "step": 3754 + }, + { + "epoch": 0.15494759428901544, + "grad_norm": 4.0777734112749195, + "learning_rate": 2.878913173004018e-06, + "loss": 0.586, + "step": 3755 + }, + { + "epoch": 0.15498885862837336, + "grad_norm": 2.4880739992540075, + "learning_rate": 2.878834250356252e-06, + "loss": 0.5669, + "step": 3756 + }, + { + "epoch": 0.15503012296773128, + "grad_norm": 6.7049798634815705, + "learning_rate": 2.878755303079057e-06, + "loss": 0.5415, + "step": 3757 + }, + { + "epoch": 0.15507138730708922, + "grad_norm": 2.934820995725861, + "learning_rate": 2.878676331173843e-06, + "loss": 0.5249, + "step": 3758 + }, + { + "epoch": 0.15511265164644714, + "grad_norm": 4.17170780031894, + "learning_rate": 2.8785973346420206e-06, + "loss": 0.5494, + "step": 3759 + }, + { + "epoch": 0.15515391598580508, + "grad_norm": 3.9613532740744675, + "learning_rate": 2.878518313485001e-06, + "loss": 0.5753, + "step": 3760 + }, + { + "epoch": 0.155195180325163, + "grad_norm": 3.538066207202518, + "learning_rate": 2.8784392677041966e-06, + "loss": 0.5702, + "step": 3761 + }, + { + "epoch": 0.1552364446645209, + "grad_norm": 4.26450113907959, + "learning_rate": 2.878360197301018e-06, + "loss": 0.6079, + "step": 3762 + }, + { + "epoch": 0.15527770900387886, + "grad_norm": 2.0290774884634573, + "learning_rate": 2.8782811022768776e-06, + "loss": 0.5183, + "step": 3763 + }, + { + "epoch": 0.15531897334323677, + "grad_norm": 6.283933011913399, + "learning_rate": 2.8782019826331898e-06, + "loss": 0.566, + "step": 3764 + }, + { + "epoch": 0.15536023768259472, + "grad_norm": 2.616562665348543, + "learning_rate": 2.8781228383713664e-06, + "loss": 0.5147, + "step": 3765 + }, + { + "epoch": 0.15540150202195263, + "grad_norm": 2.6516779217227393, + "learning_rate": 2.8780436694928217e-06, + "loss": 0.5369, + "step": 3766 + }, + { + "epoch": 0.15544276636131055, + "grad_norm": 3.2541534050157233, + "learning_rate": 2.8779644759989693e-06, + "loss": 0.5411, + "step": 3767 + }, + { + "epoch": 0.1554840307006685, + "grad_norm": 2.6600116373010843, + "learning_rate": 
2.877885257891225e-06, + "loss": 0.6446, + "step": 3768 + }, + { + "epoch": 0.1555252950400264, + "grad_norm": 30.10579269123039, + "learning_rate": 2.877806015171002e-06, + "loss": 0.6158, + "step": 3769 + }, + { + "epoch": 0.15556655937938432, + "grad_norm": 2.107648523892323, + "learning_rate": 2.877726747839718e-06, + "loss": 0.5638, + "step": 3770 + }, + { + "epoch": 0.15560782371874227, + "grad_norm": 3.646774646843944, + "learning_rate": 2.8776474558987867e-06, + "loss": 0.5344, + "step": 3771 + }, + { + "epoch": 0.15564908805810018, + "grad_norm": 5.17146955459025, + "learning_rate": 2.8775681393496263e-06, + "loss": 0.5912, + "step": 3772 + }, + { + "epoch": 0.15569035239745813, + "grad_norm": 2.7298548481557896, + "learning_rate": 2.8774887981936523e-06, + "loss": 0.5569, + "step": 3773 + }, + { + "epoch": 0.15573161673681604, + "grad_norm": 2.3216541240111472, + "learning_rate": 2.877409432432283e-06, + "loss": 0.5691, + "step": 3774 + }, + { + "epoch": 0.15577288107617396, + "grad_norm": 2.857527221815007, + "learning_rate": 2.8773300420669345e-06, + "loss": 0.5701, + "step": 3775 + }, + { + "epoch": 0.1558141454155319, + "grad_norm": 6.340339733997808, + "learning_rate": 2.8772506270990267e-06, + "loss": 0.5379, + "step": 3776 + }, + { + "epoch": 0.15585540975488982, + "grad_norm": 3.8716099165628184, + "learning_rate": 2.877171187529977e-06, + "loss": 0.5506, + "step": 3777 + }, + { + "epoch": 0.15589667409424776, + "grad_norm": 3.085974198250755, + "learning_rate": 2.877091723361205e-06, + "loss": 0.5455, + "step": 3778 + }, + { + "epoch": 0.15593793843360568, + "grad_norm": 22.714813797909475, + "learning_rate": 2.8770122345941295e-06, + "loss": 0.5621, + "step": 3779 + }, + { + "epoch": 0.1559792027729636, + "grad_norm": 2.724999368492771, + "learning_rate": 2.876932721230171e-06, + "loss": 0.5509, + "step": 3780 + }, + { + "epoch": 0.15602046711232154, + "grad_norm": 7.118117257267201, + "learning_rate": 2.8768531832707495e-06, + "loss": 0.5786, + "step": 3781 + }, + { + "epoch": 0.15606173145167945, + "grad_norm": 5.590453708556088, + "learning_rate": 2.876773620717286e-06, + "loss": 0.6405, + "step": 3782 + }, + { + "epoch": 0.1561029957910374, + "grad_norm": 6.506849750414524, + "learning_rate": 2.876694033571201e-06, + "loss": 0.6287, + "step": 3783 + }, + { + "epoch": 0.1561442601303953, + "grad_norm": 4.302766537033089, + "learning_rate": 2.876614421833917e-06, + "loss": 0.5817, + "step": 3784 + }, + { + "epoch": 0.15618552446975323, + "grad_norm": 3.658628040243204, + "learning_rate": 2.8765347855068554e-06, + "loss": 0.5625, + "step": 3785 + }, + { + "epoch": 0.15622678880911117, + "grad_norm": 2.4942852895051706, + "learning_rate": 2.876455124591439e-06, + "loss": 0.5285, + "step": 3786 + }, + { + "epoch": 0.1562680531484691, + "grad_norm": 3.171254978361825, + "learning_rate": 2.8763754390890906e-06, + "loss": 0.5857, + "step": 3787 + }, + { + "epoch": 0.15630931748782703, + "grad_norm": 27.68794675725489, + "learning_rate": 2.8762957290012337e-06, + "loss": 0.5913, + "step": 3788 + }, + { + "epoch": 0.15635058182718495, + "grad_norm": 5.919351902591996, + "learning_rate": 2.876215994329293e-06, + "loss": 0.596, + "step": 3789 + }, + { + "epoch": 0.15639184616654286, + "grad_norm": 2.651263654762692, + "learning_rate": 2.8761362350746906e-06, + "loss": 0.511, + "step": 3790 + }, + { + "epoch": 0.1564331105059008, + "grad_norm": 4.242895845430623, + "learning_rate": 2.876056451238853e-06, + "loss": 0.5593, + "step": 3791 + }, + { + "epoch": 0.15647437484525872, + 
"grad_norm": 3.80309545792659, + "learning_rate": 2.875976642823205e-06, + "loss": 0.5298, + "step": 3792 + }, + { + "epoch": 0.15651563918461667, + "grad_norm": 8.227763837726496, + "learning_rate": 2.8758968098291716e-06, + "loss": 0.615, + "step": 3793 + }, + { + "epoch": 0.15655690352397458, + "grad_norm": 3.210856325599015, + "learning_rate": 2.8758169522581796e-06, + "loss": 0.5692, + "step": 3794 + }, + { + "epoch": 0.1565981678633325, + "grad_norm": 4.593239550977863, + "learning_rate": 2.875737070111655e-06, + "loss": 0.5463, + "step": 3795 + }, + { + "epoch": 0.15663943220269044, + "grad_norm": 4.980190967558423, + "learning_rate": 2.8756571633910243e-06, + "loss": 0.5091, + "step": 3796 + }, + { + "epoch": 0.15668069654204836, + "grad_norm": 2.5513700632876732, + "learning_rate": 2.875577232097716e-06, + "loss": 0.5753, + "step": 3797 + }, + { + "epoch": 0.1567219608814063, + "grad_norm": 3.425409440669745, + "learning_rate": 2.8754972762331565e-06, + "loss": 0.5644, + "step": 3798 + }, + { + "epoch": 0.15676322522076422, + "grad_norm": 2.9796590275960835, + "learning_rate": 2.875417295798775e-06, + "loss": 0.6352, + "step": 3799 + }, + { + "epoch": 0.15680448956012213, + "grad_norm": 4.8671614307464806, + "learning_rate": 2.8753372907960003e-06, + "loss": 0.5602, + "step": 3800 + }, + { + "epoch": 0.15684575389948008, + "grad_norm": 2.778400772881061, + "learning_rate": 2.8752572612262604e-06, + "loss": 0.566, + "step": 3801 + }, + { + "epoch": 0.156887018238838, + "grad_norm": 4.425812535066303, + "learning_rate": 2.8751772070909858e-06, + "loss": 0.5549, + "step": 3802 + }, + { + "epoch": 0.1569282825781959, + "grad_norm": 3.204914782391837, + "learning_rate": 2.8750971283916057e-06, + "loss": 0.5834, + "step": 3803 + }, + { + "epoch": 0.15696954691755385, + "grad_norm": 3.686378022761524, + "learning_rate": 2.8750170251295514e-06, + "loss": 0.5431, + "step": 3804 + }, + { + "epoch": 0.15701081125691177, + "grad_norm": 3.600732902687367, + "learning_rate": 2.874936897306253e-06, + "loss": 0.6193, + "step": 3805 + }, + { + "epoch": 0.1570520755962697, + "grad_norm": 2.956287292264993, + "learning_rate": 2.874856744923142e-06, + "loss": 0.5574, + "step": 3806 + }, + { + "epoch": 0.15709333993562763, + "grad_norm": 2.569893965049346, + "learning_rate": 2.8747765679816504e-06, + "loss": 0.5768, + "step": 3807 + }, + { + "epoch": 0.15713460427498555, + "grad_norm": 3.531079646395974, + "learning_rate": 2.8746963664832098e-06, + "loss": 0.6117, + "step": 3808 + }, + { + "epoch": 0.1571758686143435, + "grad_norm": 2.527302147857922, + "learning_rate": 2.874616140429253e-06, + "loss": 0.6087, + "step": 3809 + }, + { + "epoch": 0.1572171329537014, + "grad_norm": 5.803630171493117, + "learning_rate": 2.8745358898212135e-06, + "loss": 0.5159, + "step": 3810 + }, + { + "epoch": 0.15725839729305935, + "grad_norm": 3.430824206265929, + "learning_rate": 2.8744556146605247e-06, + "loss": 0.5485, + "step": 3811 + }, + { + "epoch": 0.15729966163241726, + "grad_norm": 6.478392589164509, + "learning_rate": 2.8743753149486197e-06, + "loss": 0.5545, + "step": 3812 + }, + { + "epoch": 0.15734092597177518, + "grad_norm": 6.153825707829338, + "learning_rate": 2.8742949906869335e-06, + "loss": 0.4831, + "step": 3813 + }, + { + "epoch": 0.15738219031113312, + "grad_norm": 2.736565116766391, + "learning_rate": 2.8742146418769015e-06, + "loss": 0.5584, + "step": 3814 + }, + { + "epoch": 0.15742345465049104, + "grad_norm": 3.2469779288536107, + "learning_rate": 2.8741342685199575e-06, + "loss": 0.5152, + 
"step": 3815 + }, + { + "epoch": 0.15746471898984898, + "grad_norm": 15.355637487181804, + "learning_rate": 2.874053870617538e-06, + "loss": 0.5162, + "step": 3816 + }, + { + "epoch": 0.1575059833292069, + "grad_norm": 3.2871524741112714, + "learning_rate": 2.8739734481710792e-06, + "loss": 0.5441, + "step": 3817 + }, + { + "epoch": 0.15754724766856482, + "grad_norm": 4.513820485080999, + "learning_rate": 2.8738930011820177e-06, + "loss": 0.6157, + "step": 3818 + }, + { + "epoch": 0.15758851200792276, + "grad_norm": 2.689089189524479, + "learning_rate": 2.8738125296517897e-06, + "loss": 0.5833, + "step": 3819 + }, + { + "epoch": 0.15762977634728068, + "grad_norm": 3.613155891544201, + "learning_rate": 2.8737320335818335e-06, + "loss": 0.5936, + "step": 3820 + }, + { + "epoch": 0.15767104068663862, + "grad_norm": 5.207223464892033, + "learning_rate": 2.873651512973587e-06, + "loss": 0.5239, + "step": 3821 + }, + { + "epoch": 0.15771230502599654, + "grad_norm": 2.4201100368594672, + "learning_rate": 2.8735709678284873e-06, + "loss": 0.5746, + "step": 3822 + }, + { + "epoch": 0.15775356936535445, + "grad_norm": 2.342993788513901, + "learning_rate": 2.8734903981479747e-06, + "loss": 0.5382, + "step": 3823 + }, + { + "epoch": 0.1577948337047124, + "grad_norm": 5.155549924196559, + "learning_rate": 2.873409803933487e-06, + "loss": 0.5438, + "step": 3824 + }, + { + "epoch": 0.1578360980440703, + "grad_norm": 2.9479705268865555, + "learning_rate": 2.8733291851864645e-06, + "loss": 0.5841, + "step": 3825 + }, + { + "epoch": 0.15787736238342825, + "grad_norm": 2.1574600354552627, + "learning_rate": 2.8732485419083476e-06, + "loss": 0.5365, + "step": 3826 + }, + { + "epoch": 0.15791862672278617, + "grad_norm": 3.305755066478937, + "learning_rate": 2.873167874100576e-06, + "loss": 0.6202, + "step": 3827 + }, + { + "epoch": 0.1579598910621441, + "grad_norm": 5.955087987953953, + "learning_rate": 2.8730871817645917e-06, + "loss": 0.6128, + "step": 3828 + }, + { + "epoch": 0.15800115540150203, + "grad_norm": 2.069504223360612, + "learning_rate": 2.8730064649018347e-06, + "loss": 0.5591, + "step": 3829 + }, + { + "epoch": 0.15804241974085995, + "grad_norm": 3.505621336581116, + "learning_rate": 2.872925723513748e-06, + "loss": 0.598, + "step": 3830 + }, + { + "epoch": 0.15808368408021786, + "grad_norm": 3.2305670456775553, + "learning_rate": 2.8728449576017726e-06, + "loss": 0.5871, + "step": 3831 + }, + { + "epoch": 0.1581249484195758, + "grad_norm": 3.6425648946413194, + "learning_rate": 2.872764167167353e-06, + "loss": 0.553, + "step": 3832 + }, + { + "epoch": 0.15816621275893372, + "grad_norm": 13.32690249394627, + "learning_rate": 2.872683352211931e-06, + "loss": 0.5242, + "step": 3833 + }, + { + "epoch": 0.15820747709829167, + "grad_norm": 7.5288636801493025, + "learning_rate": 2.8726025127369496e-06, + "loss": 0.5869, + "step": 3834 + }, + { + "epoch": 0.15824874143764958, + "grad_norm": 4.29929837215957, + "learning_rate": 2.8725216487438544e-06, + "loss": 0.6209, + "step": 3835 + }, + { + "epoch": 0.1582900057770075, + "grad_norm": 2.8254889437268353, + "learning_rate": 2.872440760234089e-06, + "loss": 0.5814, + "step": 3836 + }, + { + "epoch": 0.15833127011636544, + "grad_norm": 2.748315478377538, + "learning_rate": 2.8723598472090972e-06, + "loss": 0.5874, + "step": 3837 + }, + { + "epoch": 0.15837253445572336, + "grad_norm": 3.5449879210810984, + "learning_rate": 2.8722789096703263e-06, + "loss": 0.529, + "step": 3838 + }, + { + "epoch": 0.1584137987950813, + "grad_norm": 2.5541748009407415, + 
"learning_rate": 2.8721979476192214e-06, + "loss": 0.5567, + "step": 3839 + }, + { + "epoch": 0.15845506313443922, + "grad_norm": 3.3743426190522383, + "learning_rate": 2.872116961057228e-06, + "loss": 0.5361, + "step": 3840 + }, + { + "epoch": 0.15849632747379713, + "grad_norm": 3.9635282060068215, + "learning_rate": 2.8720359499857935e-06, + "loss": 0.5909, + "step": 3841 + }, + { + "epoch": 0.15853759181315508, + "grad_norm": 2.590012415558124, + "learning_rate": 2.8719549144063644e-06, + "loss": 0.5483, + "step": 3842 + }, + { + "epoch": 0.158578856152513, + "grad_norm": 2.498378128163724, + "learning_rate": 2.8718738543203883e-06, + "loss": 0.5412, + "step": 3843 + }, + { + "epoch": 0.15862012049187094, + "grad_norm": 2.7872841026341653, + "learning_rate": 2.8717927697293133e-06, + "loss": 0.5635, + "step": 3844 + }, + { + "epoch": 0.15866138483122885, + "grad_norm": 4.846955867349428, + "learning_rate": 2.871711660634588e-06, + "loss": 0.5775, + "step": 3845 + }, + { + "epoch": 0.15870264917058677, + "grad_norm": 2.9052263188946617, + "learning_rate": 2.8716305270376604e-06, + "loss": 0.5897, + "step": 3846 + }, + { + "epoch": 0.1587439135099447, + "grad_norm": 2.3499604370281073, + "learning_rate": 2.871549368939981e-06, + "loss": 0.5187, + "step": 3847 + }, + { + "epoch": 0.15878517784930263, + "grad_norm": 1.9058618708071815, + "learning_rate": 2.8714681863429983e-06, + "loss": 0.5391, + "step": 3848 + }, + { + "epoch": 0.15882644218866057, + "grad_norm": 2.573924948379878, + "learning_rate": 2.8713869792481633e-06, + "loss": 0.4886, + "step": 3849 + }, + { + "epoch": 0.1588677065280185, + "grad_norm": 2.3963248136058892, + "learning_rate": 2.871305747656926e-06, + "loss": 0.5687, + "step": 3850 + }, + { + "epoch": 0.1589089708673764, + "grad_norm": 3.487804374876057, + "learning_rate": 2.8712244915707373e-06, + "loss": 0.5618, + "step": 3851 + }, + { + "epoch": 0.15895023520673435, + "grad_norm": 2.528498975108843, + "learning_rate": 2.871143210991049e-06, + "loss": 0.4982, + "step": 3852 + }, + { + "epoch": 0.15899149954609226, + "grad_norm": 9.351509069810877, + "learning_rate": 2.871061905919313e-06, + "loss": 0.5499, + "step": 3853 + }, + { + "epoch": 0.1590327638854502, + "grad_norm": 2.991056364500316, + "learning_rate": 2.8709805763569816e-06, + "loss": 0.5997, + "step": 3854 + }, + { + "epoch": 0.15907402822480812, + "grad_norm": 2.660888169125166, + "learning_rate": 2.8708992223055077e-06, + "loss": 0.5486, + "step": 3855 + }, + { + "epoch": 0.15911529256416604, + "grad_norm": 2.2864146601531226, + "learning_rate": 2.8708178437663435e-06, + "loss": 0.5509, + "step": 3856 + }, + { + "epoch": 0.15915655690352398, + "grad_norm": 3.4092167070312662, + "learning_rate": 2.8707364407409437e-06, + "loss": 0.5656, + "step": 3857 + }, + { + "epoch": 0.1591978212428819, + "grad_norm": 2.700395886561967, + "learning_rate": 2.870655013230762e-06, + "loss": 0.5095, + "step": 3858 + }, + { + "epoch": 0.15923908558223984, + "grad_norm": 2.7748687539500967, + "learning_rate": 2.870573561237253e-06, + "loss": 0.5478, + "step": 3859 + }, + { + "epoch": 0.15928034992159776, + "grad_norm": 2.6773089171525903, + "learning_rate": 2.8704920847618714e-06, + "loss": 0.5206, + "step": 3860 + }, + { + "epoch": 0.15932161426095567, + "grad_norm": 2.6530066353881545, + "learning_rate": 2.8704105838060726e-06, + "loss": 0.5934, + "step": 3861 + }, + { + "epoch": 0.15936287860031362, + "grad_norm": 2.3987731153082836, + "learning_rate": 2.870329058371313e-06, + "loss": 0.5424, + "step": 3862 + }, + { + 
"epoch": 0.15940414293967153, + "grad_norm": 3.139246551670902, + "learning_rate": 2.870247508459048e-06, + "loss": 0.5886, + "step": 3863 + }, + { + "epoch": 0.15944540727902945, + "grad_norm": 4.346679565133374, + "learning_rate": 2.8701659340707344e-06, + "loss": 0.6073, + "step": 3864 + }, + { + "epoch": 0.1594866716183874, + "grad_norm": 4.451065132222951, + "learning_rate": 2.87008433520783e-06, + "loss": 0.5443, + "step": 3865 + }, + { + "epoch": 0.1595279359577453, + "grad_norm": 2.2590557673389022, + "learning_rate": 2.870002711871792e-06, + "loss": 0.5789, + "step": 3866 + }, + { + "epoch": 0.15956920029710325, + "grad_norm": 2.5261474734951714, + "learning_rate": 2.869921064064078e-06, + "loss": 0.5897, + "step": 3867 + }, + { + "epoch": 0.15961046463646117, + "grad_norm": 2.3903748382773418, + "learning_rate": 2.8698393917861473e-06, + "loss": 0.5603, + "step": 3868 + }, + { + "epoch": 0.15965172897581908, + "grad_norm": 3.2237411969678433, + "learning_rate": 2.8697576950394578e-06, + "loss": 0.5553, + "step": 3869 + }, + { + "epoch": 0.15969299331517703, + "grad_norm": 2.058995178416964, + "learning_rate": 2.8696759738254694e-06, + "loss": 0.5221, + "step": 3870 + }, + { + "epoch": 0.15973425765453494, + "grad_norm": 2.4626224787702906, + "learning_rate": 2.8695942281456416e-06, + "loss": 0.5473, + "step": 3871 + }, + { + "epoch": 0.1597755219938929, + "grad_norm": 7.404363159332616, + "learning_rate": 2.8695124580014348e-06, + "loss": 0.5498, + "step": 3872 + }, + { + "epoch": 0.1598167863332508, + "grad_norm": 2.171693298256864, + "learning_rate": 2.8694306633943095e-06, + "loss": 0.5325, + "step": 3873 + }, + { + "epoch": 0.15985805067260872, + "grad_norm": 5.034373589704739, + "learning_rate": 2.8693488443257265e-06, + "loss": 0.6015, + "step": 3874 + }, + { + "epoch": 0.15989931501196666, + "grad_norm": 3.4262082761726815, + "learning_rate": 2.869267000797148e-06, + "loss": 0.6132, + "step": 3875 + }, + { + "epoch": 0.15994057935132458, + "grad_norm": 4.212358868738551, + "learning_rate": 2.8691851328100352e-06, + "loss": 0.5569, + "step": 3876 + }, + { + "epoch": 0.15998184369068252, + "grad_norm": 4.790724031229941, + "learning_rate": 2.869103240365851e-06, + "loss": 0.5922, + "step": 3877 + }, + { + "epoch": 0.16002310803004044, + "grad_norm": 3.4465494158071484, + "learning_rate": 2.8690213234660578e-06, + "loss": 0.6349, + "step": 3878 + }, + { + "epoch": 0.16006437236939836, + "grad_norm": 6.391644486642624, + "learning_rate": 2.8689393821121193e-06, + "loss": 0.6072, + "step": 3879 + }, + { + "epoch": 0.1601056367087563, + "grad_norm": 3.324460336744097, + "learning_rate": 2.8688574163054984e-06, + "loss": 0.5715, + "step": 3880 + }, + { + "epoch": 0.16014690104811422, + "grad_norm": 4.0971489059236825, + "learning_rate": 2.8687754260476595e-06, + "loss": 0.5536, + "step": 3881 + }, + { + "epoch": 0.16018816538747216, + "grad_norm": 2.2186117477769343, + "learning_rate": 2.8686934113400677e-06, + "loss": 0.5637, + "step": 3882 + }, + { + "epoch": 0.16022942972683007, + "grad_norm": 4.028638583010439, + "learning_rate": 2.868611372184188e-06, + "loss": 0.5512, + "step": 3883 + }, + { + "epoch": 0.160270694066188, + "grad_norm": 3.858514554094413, + "learning_rate": 2.868529308581485e-06, + "loss": 0.573, + "step": 3884 + }, + { + "epoch": 0.16031195840554593, + "grad_norm": 1.8390356376190575, + "learning_rate": 2.868447220533425e-06, + "loss": 0.5299, + "step": 3885 + }, + { + "epoch": 0.16035322274490385, + "grad_norm": 2.5323463858301234, + "learning_rate": 
2.8683651080414745e-06, + "loss": 0.5549, + "step": 3886 + }, + { + "epoch": 0.1603944870842618, + "grad_norm": 5.773870242423404, + "learning_rate": 2.8682829711071e-06, + "loss": 0.5775, + "step": 3887 + }, + { + "epoch": 0.1604357514236197, + "grad_norm": 12.453521912999454, + "learning_rate": 2.8682008097317685e-06, + "loss": 0.5832, + "step": 3888 + }, + { + "epoch": 0.16047701576297763, + "grad_norm": 2.7641093683480706, + "learning_rate": 2.868118623916948e-06, + "loss": 0.5769, + "step": 3889 + }, + { + "epoch": 0.16051828010233557, + "grad_norm": 4.5666865825031495, + "learning_rate": 2.868036413664106e-06, + "loss": 0.5462, + "step": 3890 + }, + { + "epoch": 0.16055954444169349, + "grad_norm": 7.036494758464516, + "learning_rate": 2.867954178974712e-06, + "loss": 0.5828, + "step": 3891 + }, + { + "epoch": 0.1606008087810514, + "grad_norm": 4.847219402682734, + "learning_rate": 2.867871919850234e-06, + "loss": 0.5741, + "step": 3892 + }, + { + "epoch": 0.16064207312040935, + "grad_norm": 3.4317783186868507, + "learning_rate": 2.8677896362921412e-06, + "loss": 0.527, + "step": 3893 + }, + { + "epoch": 0.16068333745976726, + "grad_norm": 2.6604739121834093, + "learning_rate": 2.8677073283019046e-06, + "loss": 0.5614, + "step": 3894 + }, + { + "epoch": 0.1607246017991252, + "grad_norm": 3.3720534715491803, + "learning_rate": 2.867624995880993e-06, + "loss": 0.5954, + "step": 3895 + }, + { + "epoch": 0.16076586613848312, + "grad_norm": 2.755614367747526, + "learning_rate": 2.867542639030878e-06, + "loss": 0.5293, + "step": 3896 + }, + { + "epoch": 0.16080713047784104, + "grad_norm": 2.2036300703472103, + "learning_rate": 2.8674602577530307e-06, + "loss": 0.5385, + "step": 3897 + }, + { + "epoch": 0.16084839481719898, + "grad_norm": 4.292917455567612, + "learning_rate": 2.867377852048922e-06, + "loss": 0.5982, + "step": 3898 + }, + { + "epoch": 0.1608896591565569, + "grad_norm": 2.4286444178391995, + "learning_rate": 2.8672954219200238e-06, + "loss": 0.5633, + "step": 3899 + }, + { + "epoch": 0.16093092349591484, + "grad_norm": 2.1535305022659843, + "learning_rate": 2.8672129673678096e-06, + "loss": 0.5933, + "step": 3900 + }, + { + "epoch": 0.16097218783527276, + "grad_norm": 2.2663223258719434, + "learning_rate": 2.867130488393751e-06, + "loss": 0.5448, + "step": 3901 + }, + { + "epoch": 0.16101345217463067, + "grad_norm": 2.975215604850655, + "learning_rate": 2.867047984999322e-06, + "loss": 0.5667, + "step": 3902 + }, + { + "epoch": 0.16105471651398862, + "grad_norm": 2.7607380235546537, + "learning_rate": 2.8669654571859964e-06, + "loss": 0.5704, + "step": 3903 + }, + { + "epoch": 0.16109598085334653, + "grad_norm": 3.0745684335310015, + "learning_rate": 2.866882904955248e-06, + "loss": 0.5758, + "step": 3904 + }, + { + "epoch": 0.16113724519270448, + "grad_norm": 4.247391907870252, + "learning_rate": 2.8668003283085514e-06, + "loss": 0.5951, + "step": 3905 + }, + { + "epoch": 0.1611785095320624, + "grad_norm": 4.51367753403607, + "learning_rate": 2.866717727247382e-06, + "loss": 0.5626, + "step": 3906 + }, + { + "epoch": 0.1612197738714203, + "grad_norm": 2.9882485303046775, + "learning_rate": 2.866635101773215e-06, + "loss": 0.5586, + "step": 3907 + }, + { + "epoch": 0.16126103821077825, + "grad_norm": 3.912481855742156, + "learning_rate": 2.8665524518875256e-06, + "loss": 0.5897, + "step": 3908 + }, + { + "epoch": 0.16130230255013617, + "grad_norm": 6.519001823416214, + "learning_rate": 2.8664697775917914e-06, + "loss": 0.6099, + "step": 3909 + }, + { + "epoch": 
0.1613435668894941, + "grad_norm": 3.6885828832146403, + "learning_rate": 2.8663870788874882e-06, + "loss": 0.601, + "step": 3910 + }, + { + "epoch": 0.16138483122885203, + "grad_norm": 2.0763053691895554, + "learning_rate": 2.866304355776094e-06, + "loss": 0.5576, + "step": 3911 + }, + { + "epoch": 0.16142609556820994, + "grad_norm": 2.2712448934682676, + "learning_rate": 2.866221608259086e-06, + "loss": 0.5683, + "step": 3912 + }, + { + "epoch": 0.1614673599075679, + "grad_norm": 2.6218186270554624, + "learning_rate": 2.8661388363379422e-06, + "loss": 0.5122, + "step": 3913 + }, + { + "epoch": 0.1615086242469258, + "grad_norm": 2.5091746619551967, + "learning_rate": 2.8660560400141414e-06, + "loss": 0.5138, + "step": 3914 + }, + { + "epoch": 0.16154988858628375, + "grad_norm": 11.611859894916464, + "learning_rate": 2.865973219289162e-06, + "loss": 0.5251, + "step": 3915 + }, + { + "epoch": 0.16159115292564166, + "grad_norm": 2.4447800568257176, + "learning_rate": 2.865890374164484e-06, + "loss": 0.5705, + "step": 3916 + }, + { + "epoch": 0.16163241726499958, + "grad_norm": 2.089907320494799, + "learning_rate": 2.865807504641587e-06, + "loss": 0.525, + "step": 3917 + }, + { + "epoch": 0.16167368160435752, + "grad_norm": 2.9797607175538805, + "learning_rate": 2.8657246107219512e-06, + "loss": 0.5554, + "step": 3918 + }, + { + "epoch": 0.16171494594371544, + "grad_norm": 11.09335105047027, + "learning_rate": 2.865641692407057e-06, + "loss": 0.5976, + "step": 3919 + }, + { + "epoch": 0.16175621028307338, + "grad_norm": 3.31182560629658, + "learning_rate": 2.865558749698386e-06, + "loss": 0.6042, + "step": 3920 + }, + { + "epoch": 0.1617974746224313, + "grad_norm": 3.0335897500849027, + "learning_rate": 2.8654757825974203e-06, + "loss": 0.5577, + "step": 3921 + }, + { + "epoch": 0.1618387389617892, + "grad_norm": 5.1462681628900855, + "learning_rate": 2.8653927911056406e-06, + "loss": 0.572, + "step": 3922 + }, + { + "epoch": 0.16188000330114716, + "grad_norm": 2.654628828819479, + "learning_rate": 2.86530977522453e-06, + "loss": 0.5429, + "step": 3923 + }, + { + "epoch": 0.16192126764050507, + "grad_norm": 2.671212276321846, + "learning_rate": 2.8652267349555716e-06, + "loss": 0.563, + "step": 3924 + }, + { + "epoch": 0.161962531979863, + "grad_norm": 4.639617755614131, + "learning_rate": 2.8651436703002483e-06, + "loss": 0.5798, + "step": 3925 + }, + { + "epoch": 0.16200379631922093, + "grad_norm": 2.58003044729139, + "learning_rate": 2.865060581260044e-06, + "loss": 0.5449, + "step": 3926 + }, + { + "epoch": 0.16204506065857885, + "grad_norm": 2.478800741207068, + "learning_rate": 2.8649774678364427e-06, + "loss": 0.5938, + "step": 3927 + }, + { + "epoch": 0.1620863249979368, + "grad_norm": 2.565452739544859, + "learning_rate": 2.8648943300309293e-06, + "loss": 0.6482, + "step": 3928 + }, + { + "epoch": 0.1621275893372947, + "grad_norm": 5.2615760324879925, + "learning_rate": 2.8648111678449887e-06, + "loss": 0.5223, + "step": 3929 + }, + { + "epoch": 0.16216885367665262, + "grad_norm": 4.104976964459226, + "learning_rate": 2.8647279812801066e-06, + "loss": 0.551, + "step": 3930 + }, + { + "epoch": 0.16221011801601057, + "grad_norm": 2.8883017282646453, + "learning_rate": 2.864644770337769e-06, + "loss": 0.5785, + "step": 3931 + }, + { + "epoch": 0.16225138235536848, + "grad_norm": 3.4963792961621913, + "learning_rate": 2.864561535019461e-06, + "loss": 0.5397, + "step": 3932 + }, + { + "epoch": 0.16229264669472643, + "grad_norm": 1.8856442215922053, + "learning_rate": 
2.864478275326671e-06, + "loss": 0.5344, + "step": 3933 + }, + { + "epoch": 0.16233391103408434, + "grad_norm": 3.6071014313085388, + "learning_rate": 2.8643949912608855e-06, + "loss": 0.562, + "step": 3934 + }, + { + "epoch": 0.16237517537344226, + "grad_norm": 8.859868699082073, + "learning_rate": 2.8643116828235922e-06, + "loss": 0.5673, + "step": 3935 + }, + { + "epoch": 0.1624164397128002, + "grad_norm": 2.3790328124863254, + "learning_rate": 2.8642283500162797e-06, + "loss": 0.5913, + "step": 3936 + }, + { + "epoch": 0.16245770405215812, + "grad_norm": 6.475024082134611, + "learning_rate": 2.8641449928404354e-06, + "loss": 0.5793, + "step": 3937 + }, + { + "epoch": 0.16249896839151606, + "grad_norm": 4.667454389980321, + "learning_rate": 2.8640616112975496e-06, + "loss": 0.6281, + "step": 3938 + }, + { + "epoch": 0.16254023273087398, + "grad_norm": 3.0514030337098488, + "learning_rate": 2.8639782053891105e-06, + "loss": 0.5736, + "step": 3939 + }, + { + "epoch": 0.1625814970702319, + "grad_norm": 4.939358532964198, + "learning_rate": 2.8638947751166085e-06, + "loss": 0.5944, + "step": 3940 + }, + { + "epoch": 0.16262276140958984, + "grad_norm": 6.233963734996873, + "learning_rate": 2.8638113204815344e-06, + "loss": 0.5669, + "step": 3941 + }, + { + "epoch": 0.16266402574894775, + "grad_norm": 5.92319204097386, + "learning_rate": 2.863727841485378e-06, + "loss": 0.5679, + "step": 3942 + }, + { + "epoch": 0.1627052900883057, + "grad_norm": 3.0799345666941584, + "learning_rate": 2.863644338129631e-06, + "loss": 0.5678, + "step": 3943 + }, + { + "epoch": 0.16274655442766361, + "grad_norm": 3.7855161198068896, + "learning_rate": 2.8635608104157847e-06, + "loss": 0.552, + "step": 3944 + }, + { + "epoch": 0.16278781876702153, + "grad_norm": 5.729019408691964, + "learning_rate": 2.863477258345331e-06, + "loss": 0.5926, + "step": 3945 + }, + { + "epoch": 0.16282908310637947, + "grad_norm": 2.7962935826648927, + "learning_rate": 2.8633936819197633e-06, + "loss": 0.5113, + "step": 3946 + }, + { + "epoch": 0.1628703474457374, + "grad_norm": 3.838696644236081, + "learning_rate": 2.863310081140573e-06, + "loss": 0.5744, + "step": 3947 + }, + { + "epoch": 0.16291161178509533, + "grad_norm": 2.7243243026608233, + "learning_rate": 2.8632264560092545e-06, + "loss": 0.5678, + "step": 3948 + }, + { + "epoch": 0.16295287612445325, + "grad_norm": 9.13583116657738, + "learning_rate": 2.863142806527301e-06, + "loss": 0.5553, + "step": 3949 + }, + { + "epoch": 0.16299414046381117, + "grad_norm": 2.0204170244729642, + "learning_rate": 2.8630591326962074e-06, + "loss": 0.5972, + "step": 3950 + }, + { + "epoch": 0.1630354048031691, + "grad_norm": 4.473631981743832, + "learning_rate": 2.8629754345174672e-06, + "loss": 0.5201, + "step": 3951 + }, + { + "epoch": 0.16307666914252703, + "grad_norm": 5.124034390857788, + "learning_rate": 2.8628917119925768e-06, + "loss": 0.5391, + "step": 3952 + }, + { + "epoch": 0.16311793348188494, + "grad_norm": 2.3470600424600736, + "learning_rate": 2.86280796512303e-06, + "loss": 0.5227, + "step": 3953 + }, + { + "epoch": 0.16315919782124289, + "grad_norm": 2.487163091019554, + "learning_rate": 2.8627241939103246e-06, + "loss": 0.5869, + "step": 3954 + }, + { + "epoch": 0.1632004621606008, + "grad_norm": 5.047959603956368, + "learning_rate": 2.862640398355956e-06, + "loss": 0.531, + "step": 3955 + }, + { + "epoch": 0.16324172649995874, + "grad_norm": 2.7477969063346928, + "learning_rate": 2.862556578461421e-06, + "loss": 0.5351, + "step": 3956 + }, + { + "epoch": 
0.16328299083931666, + "grad_norm": 4.984969817795128, + "learning_rate": 2.862472734228217e-06, + "loss": 0.6006, + "step": 3957 + }, + { + "epoch": 0.16332425517867458, + "grad_norm": 2.1894125300465226, + "learning_rate": 2.862388865657841e-06, + "loss": 0.5852, + "step": 3958 + }, + { + "epoch": 0.16336551951803252, + "grad_norm": 3.9792086882020374, + "learning_rate": 2.862304972751792e-06, + "loss": 0.5626, + "step": 3959 + }, + { + "epoch": 0.16340678385739044, + "grad_norm": 2.945572412026471, + "learning_rate": 2.8622210555115682e-06, + "loss": 0.5438, + "step": 3960 + }, + { + "epoch": 0.16344804819674838, + "grad_norm": 2.8140201095108615, + "learning_rate": 2.862137113938669e-06, + "loss": 0.5352, + "step": 3961 + }, + { + "epoch": 0.1634893125361063, + "grad_norm": 3.0250572317478173, + "learning_rate": 2.8620531480345932e-06, + "loss": 0.5349, + "step": 3962 + }, + { + "epoch": 0.1635305768754642, + "grad_norm": 2.8161299672525626, + "learning_rate": 2.8619691578008405e-06, + "loss": 0.5459, + "step": 3963 + }, + { + "epoch": 0.16357184121482216, + "grad_norm": 3.107044564870041, + "learning_rate": 2.861885143238912e-06, + "loss": 0.6037, + "step": 3964 + }, + { + "epoch": 0.16361310555418007, + "grad_norm": 2.2843104965493097, + "learning_rate": 2.8618011043503075e-06, + "loss": 0.5318, + "step": 3965 + }, + { + "epoch": 0.16365436989353802, + "grad_norm": 5.15291098110044, + "learning_rate": 2.8617170411365285e-06, + "loss": 0.5309, + "step": 3966 + }, + { + "epoch": 0.16369563423289593, + "grad_norm": 7.752254358696754, + "learning_rate": 2.861632953599077e-06, + "loss": 0.5315, + "step": 3967 + }, + { + "epoch": 0.16373689857225385, + "grad_norm": 5.3951925787904536, + "learning_rate": 2.8615488417394548e-06, + "loss": 0.5308, + "step": 3968 + }, + { + "epoch": 0.1637781629116118, + "grad_norm": 3.02319854521102, + "learning_rate": 2.861464705559164e-06, + "loss": 0.565, + "step": 3969 + }, + { + "epoch": 0.1638194272509697, + "grad_norm": 2.0365713887342296, + "learning_rate": 2.8613805450597077e-06, + "loss": 0.5217, + "step": 3970 + }, + { + "epoch": 0.16386069159032765, + "grad_norm": 3.0594868532790325, + "learning_rate": 2.8612963602425893e-06, + "loss": 0.5889, + "step": 3971 + }, + { + "epoch": 0.16390195592968557, + "grad_norm": 2.4864914437040957, + "learning_rate": 2.861212151109312e-06, + "loss": 0.5699, + "step": 3972 + }, + { + "epoch": 0.16394322026904348, + "grad_norm": 3.4380547050384855, + "learning_rate": 2.8611279176613806e-06, + "loss": 0.5941, + "step": 3973 + }, + { + "epoch": 0.16398448460840143, + "grad_norm": 4.089358647426037, + "learning_rate": 2.8610436599003e-06, + "loss": 0.5674, + "step": 3974 + }, + { + "epoch": 0.16402574894775934, + "grad_norm": 6.7839059068847645, + "learning_rate": 2.860959377827575e-06, + "loss": 0.6, + "step": 3975 + }, + { + "epoch": 0.16406701328711729, + "grad_norm": 2.3518023269759745, + "learning_rate": 2.86087507144471e-06, + "loss": 0.5592, + "step": 3976 + }, + { + "epoch": 0.1641082776264752, + "grad_norm": 2.861806292516859, + "learning_rate": 2.8607907407532127e-06, + "loss": 0.5714, + "step": 3977 + }, + { + "epoch": 0.16414954196583312, + "grad_norm": 2.5767468465984136, + "learning_rate": 2.860706385754588e-06, + "loss": 0.5772, + "step": 3978 + }, + { + "epoch": 0.16419080630519106, + "grad_norm": 3.0081461019750524, + "learning_rate": 2.8606220064503435e-06, + "loss": 0.559, + "step": 3979 + }, + { + "epoch": 0.16423207064454898, + "grad_norm": 5.368136602455857, + "learning_rate": 
2.860537602841986e-06, + "loss": 0.5433, + "step": 3980 + }, + { + "epoch": 0.16427333498390692, + "grad_norm": 4.684972756790065, + "learning_rate": 2.860453174931024e-06, + "loss": 0.5861, + "step": 3981 + }, + { + "epoch": 0.16431459932326484, + "grad_norm": 2.818653101969332, + "learning_rate": 2.860368722718964e-06, + "loss": 0.5073, + "step": 3982 + }, + { + "epoch": 0.16435586366262275, + "grad_norm": 3.4814380943856786, + "learning_rate": 2.8602842462073166e-06, + "loss": 0.5037, + "step": 3983 + }, + { + "epoch": 0.1643971280019807, + "grad_norm": 5.542209114980051, + "learning_rate": 2.860199745397589e-06, + "loss": 0.5257, + "step": 3984 + }, + { + "epoch": 0.1644383923413386, + "grad_norm": 2.296719356948211, + "learning_rate": 2.8601152202912913e-06, + "loss": 0.6056, + "step": 3985 + }, + { + "epoch": 0.16447965668069653, + "grad_norm": 5.11438987242061, + "learning_rate": 2.8600306708899333e-06, + "loss": 0.5183, + "step": 3986 + }, + { + "epoch": 0.16452092102005447, + "grad_norm": 6.31153266088967, + "learning_rate": 2.8599460971950253e-06, + "loss": 0.5286, + "step": 3987 + }, + { + "epoch": 0.1645621853594124, + "grad_norm": 3.0232129505304814, + "learning_rate": 2.8598614992080784e-06, + "loss": 0.5524, + "step": 3988 + }, + { + "epoch": 0.16460344969877033, + "grad_norm": 2.3618820067268307, + "learning_rate": 2.859776876930602e-06, + "loss": 0.6166, + "step": 3989 + }, + { + "epoch": 0.16464471403812825, + "grad_norm": 2.6717536344739683, + "learning_rate": 2.85969223036411e-06, + "loss": 0.5731, + "step": 3990 + }, + { + "epoch": 0.16468597837748616, + "grad_norm": 2.366352680209079, + "learning_rate": 2.8596075595101134e-06, + "loss": 0.5577, + "step": 3991 + }, + { + "epoch": 0.1647272427168441, + "grad_norm": 3.8016453018545873, + "learning_rate": 2.859522864370124e-06, + "loss": 0.5485, + "step": 3992 + }, + { + "epoch": 0.16476850705620202, + "grad_norm": 3.3522162880031163, + "learning_rate": 2.8594381449456555e-06, + "loss": 0.5656, + "step": 3993 + }, + { + "epoch": 0.16480977139555997, + "grad_norm": 4.722182193865762, + "learning_rate": 2.859353401238221e-06, + "loss": 0.5183, + "step": 3994 + }, + { + "epoch": 0.16485103573491788, + "grad_norm": 3.8674189330403577, + "learning_rate": 2.8592686332493342e-06, + "loss": 0.5851, + "step": 3995 + }, + { + "epoch": 0.1648923000742758, + "grad_norm": 2.296558382956157, + "learning_rate": 2.859183840980509e-06, + "loss": 0.5848, + "step": 3996 + }, + { + "epoch": 0.16493356441363374, + "grad_norm": 2.608037428278561, + "learning_rate": 2.85909902443326e-06, + "loss": 0.5367, + "step": 3997 + }, + { + "epoch": 0.16497482875299166, + "grad_norm": 4.114787501115439, + "learning_rate": 2.859014183609103e-06, + "loss": 0.5997, + "step": 3998 + }, + { + "epoch": 0.1650160930923496, + "grad_norm": 25.117803605884795, + "learning_rate": 2.858929318509553e-06, + "loss": 0.5671, + "step": 3999 + }, + { + "epoch": 0.16505735743170752, + "grad_norm": 2.3158273767965647, + "learning_rate": 2.8588444291361256e-06, + "loss": 0.5007, + "step": 4000 + }, + { + "epoch": 0.16509862177106543, + "grad_norm": 4.588419207789336, + "learning_rate": 2.8587595154903375e-06, + "loss": 0.6188, + "step": 4001 + }, + { + "epoch": 0.16513988611042338, + "grad_norm": 3.485297375941048, + "learning_rate": 2.8586745775737054e-06, + "loss": 0.6284, + "step": 4002 + }, + { + "epoch": 0.1651811504497813, + "grad_norm": 4.477777780578168, + "learning_rate": 2.858589615387746e-06, + "loss": 0.5413, + "step": 4003 + }, + { + "epoch": 
0.16522241478913924, + "grad_norm": 5.925233386609552, + "learning_rate": 2.8585046289339776e-06, + "loss": 0.5061, + "step": 4004 + }, + { + "epoch": 0.16526367912849715, + "grad_norm": 8.266850300352012, + "learning_rate": 2.8584196182139184e-06, + "loss": 0.5303, + "step": 4005 + }, + { + "epoch": 0.16530494346785507, + "grad_norm": 2.154837458022208, + "learning_rate": 2.8583345832290864e-06, + "loss": 0.5821, + "step": 4006 + }, + { + "epoch": 0.165346207807213, + "grad_norm": 3.8900469888710862, + "learning_rate": 2.8582495239810007e-06, + "loss": 0.5887, + "step": 4007 + }, + { + "epoch": 0.16538747214657093, + "grad_norm": 4.01904015070798, + "learning_rate": 2.858164440471181e-06, + "loss": 0.5598, + "step": 4008 + }, + { + "epoch": 0.16542873648592887, + "grad_norm": 2.7973074763685277, + "learning_rate": 2.858079332701146e-06, + "loss": 0.6056, + "step": 4009 + }, + { + "epoch": 0.1654700008252868, + "grad_norm": 2.2773065943512014, + "learning_rate": 2.8579942006724177e-06, + "loss": 0.6133, + "step": 4010 + }, + { + "epoch": 0.1655112651646447, + "grad_norm": 4.798583433604552, + "learning_rate": 2.857909044386515e-06, + "loss": 0.5926, + "step": 4011 + }, + { + "epoch": 0.16555252950400265, + "grad_norm": 10.903875111492868, + "learning_rate": 2.85782386384496e-06, + "loss": 0.5633, + "step": 4012 + }, + { + "epoch": 0.16559379384336056, + "grad_norm": 2.392920353518985, + "learning_rate": 2.8577386590492744e-06, + "loss": 0.5608, + "step": 4013 + }, + { + "epoch": 0.16563505818271848, + "grad_norm": 3.766120645866179, + "learning_rate": 2.857653430000979e-06, + "loss": 0.5597, + "step": 4014 + }, + { + "epoch": 0.16567632252207642, + "grad_norm": 2.577571627760483, + "learning_rate": 2.857568176701598e-06, + "loss": 0.6173, + "step": 4015 + }, + { + "epoch": 0.16571758686143434, + "grad_norm": 5.112682394386963, + "learning_rate": 2.8574828991526528e-06, + "loss": 0.5274, + "step": 4016 + }, + { + "epoch": 0.16575885120079228, + "grad_norm": 4.568476744135979, + "learning_rate": 2.857397597355667e-06, + "loss": 0.5656, + "step": 4017 + }, + { + "epoch": 0.1658001155401502, + "grad_norm": 2.8195382618026006, + "learning_rate": 2.8573122713121646e-06, + "loss": 0.5347, + "step": 4018 + }, + { + "epoch": 0.16584137987950812, + "grad_norm": 7.464335058950939, + "learning_rate": 2.8572269210236696e-06, + "loss": 0.5887, + "step": 4019 + }, + { + "epoch": 0.16588264421886606, + "grad_norm": 2.313732858753996, + "learning_rate": 2.8571415464917063e-06, + "loss": 0.5708, + "step": 4020 + }, + { + "epoch": 0.16592390855822398, + "grad_norm": 3.646756747387592, + "learning_rate": 2.8570561477178e-06, + "loss": 0.5472, + "step": 4021 + }, + { + "epoch": 0.16596517289758192, + "grad_norm": 2.882778286542871, + "learning_rate": 2.856970724703476e-06, + "loss": 0.5795, + "step": 4022 + }, + { + "epoch": 0.16600643723693984, + "grad_norm": 4.397658978829604, + "learning_rate": 2.8568852774502607e-06, + "loss": 0.5807, + "step": 4023 + }, + { + "epoch": 0.16604770157629775, + "grad_norm": 2.7949385951925843, + "learning_rate": 2.8567998059596794e-06, + "loss": 0.5019, + "step": 4024 + }, + { + "epoch": 0.1660889659156557, + "grad_norm": 2.710385333303372, + "learning_rate": 2.8567143102332595e-06, + "loss": 0.5929, + "step": 4025 + }, + { + "epoch": 0.1661302302550136, + "grad_norm": 4.148248045525746, + "learning_rate": 2.856628790272528e-06, + "loss": 0.5601, + "step": 4026 + }, + { + "epoch": 0.16617149459437155, + "grad_norm": 3.7801082843696516, + "learning_rate": 
2.856543246079013e-06, + "loss": 0.5812, + "step": 4027 + }, + { + "epoch": 0.16621275893372947, + "grad_norm": 3.574391817233898, + "learning_rate": 2.8564576776542417e-06, + "loss": 0.6136, + "step": 4028 + }, + { + "epoch": 0.1662540232730874, + "grad_norm": 2.1455596270203583, + "learning_rate": 2.856372084999743e-06, + "loss": 0.601, + "step": 4029 + }, + { + "epoch": 0.16629528761244533, + "grad_norm": 2.8488999067444913, + "learning_rate": 2.8562864681170457e-06, + "loss": 0.5805, + "step": 4030 + }, + { + "epoch": 0.16633655195180325, + "grad_norm": 3.0119274799845184, + "learning_rate": 2.8562008270076788e-06, + "loss": 0.5534, + "step": 4031 + }, + { + "epoch": 0.1663778162911612, + "grad_norm": 2.9867790937268692, + "learning_rate": 2.856115161673173e-06, + "loss": 0.4612, + "step": 4032 + }, + { + "epoch": 0.1664190806305191, + "grad_norm": 3.481702028809394, + "learning_rate": 2.8560294721150576e-06, + "loss": 0.6169, + "step": 4033 + }, + { + "epoch": 0.16646034496987702, + "grad_norm": 3.0021557448852603, + "learning_rate": 2.855943758334864e-06, + "loss": 0.5284, + "step": 4034 + }, + { + "epoch": 0.16650160930923497, + "grad_norm": 2.3661171707507007, + "learning_rate": 2.8558580203341225e-06, + "loss": 0.5512, + "step": 4035 + }, + { + "epoch": 0.16654287364859288, + "grad_norm": 5.522035748922308, + "learning_rate": 2.8557722581143645e-06, + "loss": 0.5207, + "step": 4036 + }, + { + "epoch": 0.16658413798795083, + "grad_norm": 2.2757705708957796, + "learning_rate": 2.855686471677123e-06, + "loss": 0.5398, + "step": 4037 + }, + { + "epoch": 0.16662540232730874, + "grad_norm": 3.9351527813948723, + "learning_rate": 2.8556006610239295e-06, + "loss": 0.5279, + "step": 4038 + }, + { + "epoch": 0.16666666666666666, + "grad_norm": 2.6679728164658503, + "learning_rate": 2.855514826156317e-06, + "loss": 0.549, + "step": 4039 + }, + { + "epoch": 0.1667079310060246, + "grad_norm": 6.496005983772854, + "learning_rate": 2.8554289670758187e-06, + "loss": 0.5906, + "step": 4040 + }, + { + "epoch": 0.16674919534538252, + "grad_norm": 3.4977565417222967, + "learning_rate": 2.8553430837839685e-06, + "loss": 0.606, + "step": 4041 + }, + { + "epoch": 0.16679045968474046, + "grad_norm": 3.520857209366931, + "learning_rate": 2.8552571762823e-06, + "loss": 0.5916, + "step": 4042 + }, + { + "epoch": 0.16683172402409838, + "grad_norm": 2.5951199693330547, + "learning_rate": 2.8551712445723483e-06, + "loss": 0.6064, + "step": 4043 + }, + { + "epoch": 0.1668729883634563, + "grad_norm": 2.0641616766912625, + "learning_rate": 2.8550852886556475e-06, + "loss": 0.5035, + "step": 4044 + }, + { + "epoch": 0.16691425270281424, + "grad_norm": 3.4043079201966098, + "learning_rate": 2.854999308533734e-06, + "loss": 0.5732, + "step": 4045 + }, + { + "epoch": 0.16695551704217215, + "grad_norm": 2.638407204461933, + "learning_rate": 2.854913304208143e-06, + "loss": 0.5498, + "step": 4046 + }, + { + "epoch": 0.16699678138153007, + "grad_norm": 2.9227365112200636, + "learning_rate": 2.854827275680411e-06, + "loss": 0.5495, + "step": 4047 + }, + { + "epoch": 0.167038045720888, + "grad_norm": 2.3076221169411455, + "learning_rate": 2.8547412229520746e-06, + "loss": 0.5881, + "step": 4048 + }, + { + "epoch": 0.16707931006024593, + "grad_norm": 2.9014452678737053, + "learning_rate": 2.8546551460246708e-06, + "loss": 0.5805, + "step": 4049 + }, + { + "epoch": 0.16712057439960387, + "grad_norm": 2.314497798161297, + "learning_rate": 2.8545690448997376e-06, + "loss": 0.5316, + "step": 4050 + }, + { + "epoch": 
0.1671618387389618, + "grad_norm": 4.6124769705202, + "learning_rate": 2.854482919578812e-06, + "loss": 0.5445, + "step": 4051 + }, + { + "epoch": 0.1672031030783197, + "grad_norm": 8.092982476266318, + "learning_rate": 2.854396770063433e-06, + "loss": 0.6335, + "step": 4052 + }, + { + "epoch": 0.16724436741767765, + "grad_norm": 5.236857326349627, + "learning_rate": 2.8543105963551404e-06, + "loss": 0.5284, + "step": 4053 + }, + { + "epoch": 0.16728563175703556, + "grad_norm": 3.423982908875862, + "learning_rate": 2.854224398455472e-06, + "loss": 0.6175, + "step": 4054 + }, + { + "epoch": 0.1673268960963935, + "grad_norm": 8.695645865297248, + "learning_rate": 2.854138176365968e-06, + "loss": 0.524, + "step": 4055 + }, + { + "epoch": 0.16736816043575142, + "grad_norm": 2.267588756853779, + "learning_rate": 2.854051930088169e-06, + "loss": 0.5365, + "step": 4056 + }, + { + "epoch": 0.16740942477510934, + "grad_norm": 2.7834976421375974, + "learning_rate": 2.8539656596236155e-06, + "loss": 0.6021, + "step": 4057 + }, + { + "epoch": 0.16745068911446728, + "grad_norm": 1.775027436320947, + "learning_rate": 2.8538793649738478e-06, + "loss": 0.5048, + "step": 4058 + }, + { + "epoch": 0.1674919534538252, + "grad_norm": 2.795492666435562, + "learning_rate": 2.8537930461404072e-06, + "loss": 0.5668, + "step": 4059 + }, + { + "epoch": 0.16753321779318314, + "grad_norm": 2.838074632595115, + "learning_rate": 2.8537067031248367e-06, + "loss": 0.5314, + "step": 4060 + }, + { + "epoch": 0.16757448213254106, + "grad_norm": 2.8042727016465325, + "learning_rate": 2.8536203359286783e-06, + "loss": 0.5653, + "step": 4061 + }, + { + "epoch": 0.16761574647189897, + "grad_norm": 3.1947255167533624, + "learning_rate": 2.853533944553474e-06, + "loss": 0.6183, + "step": 4062 + }, + { + "epoch": 0.16765701081125692, + "grad_norm": 2.129442548783324, + "learning_rate": 2.853447529000768e-06, + "loss": 0.5397, + "step": 4063 + }, + { + "epoch": 0.16769827515061483, + "grad_norm": 2.981273255217708, + "learning_rate": 2.8533610892721024e-06, + "loss": 0.5659, + "step": 4064 + }, + { + "epoch": 0.16773953948997278, + "grad_norm": 3.517869829955571, + "learning_rate": 2.8532746253690226e-06, + "loss": 0.5621, + "step": 4065 + }, + { + "epoch": 0.1677808038293307, + "grad_norm": 1.8015039902357133, + "learning_rate": 2.8531881372930727e-06, + "loss": 0.5495, + "step": 4066 + }, + { + "epoch": 0.1678220681686886, + "grad_norm": 2.4846538225267905, + "learning_rate": 2.8531016250457975e-06, + "loss": 0.5517, + "step": 4067 + }, + { + "epoch": 0.16786333250804655, + "grad_norm": 2.9626793014825377, + "learning_rate": 2.8530150886287425e-06, + "loss": 0.5759, + "step": 4068 + }, + { + "epoch": 0.16790459684740447, + "grad_norm": 2.754499220594409, + "learning_rate": 2.852928528043453e-06, + "loss": 0.6188, + "step": 4069 + }, + { + "epoch": 0.1679458611867624, + "grad_norm": 3.701537692621175, + "learning_rate": 2.8528419432914757e-06, + "loss": 0.6236, + "step": 4070 + }, + { + "epoch": 0.16798712552612033, + "grad_norm": 2.3373628869059466, + "learning_rate": 2.852755334374357e-06, + "loss": 0.5612, + "step": 4071 + }, + { + "epoch": 0.16802838986547824, + "grad_norm": 3.478647061942048, + "learning_rate": 2.8526687012936435e-06, + "loss": 0.5601, + "step": 4072 + }, + { + "epoch": 0.1680696542048362, + "grad_norm": 4.165603397063237, + "learning_rate": 2.8525820440508835e-06, + "loss": 0.5857, + "step": 4073 + }, + { + "epoch": 0.1681109185441941, + "grad_norm": 2.965028800880287, + "learning_rate": 
2.852495362647624e-06, + "loss": 0.5807, + "step": 4074 + }, + { + "epoch": 0.16815218288355202, + "grad_norm": 4.846971387581239, + "learning_rate": 2.8524086570854148e-06, + "loss": 0.5249, + "step": 4075 + }, + { + "epoch": 0.16819344722290996, + "grad_norm": 12.692140131741617, + "learning_rate": 2.8523219273658033e-06, + "loss": 0.5261, + "step": 4076 + }, + { + "epoch": 0.16823471156226788, + "grad_norm": 2.817447111220775, + "learning_rate": 2.8522351734903393e-06, + "loss": 0.5356, + "step": 4077 + }, + { + "epoch": 0.16827597590162582, + "grad_norm": 1.9356052631759875, + "learning_rate": 2.8521483954605726e-06, + "loss": 0.487, + "step": 4078 + }, + { + "epoch": 0.16831724024098374, + "grad_norm": 2.4059189339628895, + "learning_rate": 2.8520615932780526e-06, + "loss": 0.5308, + "step": 4079 + }, + { + "epoch": 0.16835850458034166, + "grad_norm": 3.3305839168285085, + "learning_rate": 2.85197476694433e-06, + "loss": 0.5574, + "step": 4080 + }, + { + "epoch": 0.1683997689196996, + "grad_norm": 5.094120493141725, + "learning_rate": 2.8518879164609566e-06, + "loss": 0.546, + "step": 4081 + }, + { + "epoch": 0.16844103325905752, + "grad_norm": 2.2110780058246298, + "learning_rate": 2.8518010418294822e-06, + "loss": 0.5572, + "step": 4082 + }, + { + "epoch": 0.16848229759841546, + "grad_norm": 3.1563215592898732, + "learning_rate": 2.8517141430514605e-06, + "loss": 0.5433, + "step": 4083 + }, + { + "epoch": 0.16852356193777338, + "grad_norm": 2.702539269160638, + "learning_rate": 2.8516272201284424e-06, + "loss": 0.5941, + "step": 4084 + }, + { + "epoch": 0.1685648262771313, + "grad_norm": 2.6384876460435125, + "learning_rate": 2.851540273061981e-06, + "loss": 0.5814, + "step": 4085 + }, + { + "epoch": 0.16860609061648923, + "grad_norm": 8.469641320141466, + "learning_rate": 2.851453301853629e-06, + "loss": 0.5004, + "step": 4086 + }, + { + "epoch": 0.16864735495584715, + "grad_norm": 3.4065031795027125, + "learning_rate": 2.85136630650494e-06, + "loss": 0.5259, + "step": 4087 + }, + { + "epoch": 0.1686886192952051, + "grad_norm": 14.75892710857907, + "learning_rate": 2.851279287017469e-06, + "loss": 0.5767, + "step": 4088 + }, + { + "epoch": 0.168729883634563, + "grad_norm": 1.998324603372513, + "learning_rate": 2.8511922433927686e-06, + "loss": 0.4697, + "step": 4089 + }, + { + "epoch": 0.16877114797392093, + "grad_norm": 3.558408770591107, + "learning_rate": 2.8511051756323947e-06, + "loss": 0.5841, + "step": 4090 + }, + { + "epoch": 0.16881241231327887, + "grad_norm": 3.131390111040722, + "learning_rate": 2.851018083737903e-06, + "loss": 0.5585, + "step": 4091 + }, + { + "epoch": 0.16885367665263679, + "grad_norm": 3.5810087126285866, + "learning_rate": 2.850930967710848e-06, + "loss": 0.642, + "step": 4092 + }, + { + "epoch": 0.16889494099199473, + "grad_norm": 2.405355644138216, + "learning_rate": 2.850843827552787e-06, + "loss": 0.5453, + "step": 4093 + }, + { + "epoch": 0.16893620533135265, + "grad_norm": 2.959941748857188, + "learning_rate": 2.8507566632652754e-06, + "loss": 0.5354, + "step": 4094 + }, + { + "epoch": 0.16897746967071056, + "grad_norm": 2.3804220269879117, + "learning_rate": 2.850669474849871e-06, + "loss": 0.5058, + "step": 4095 + }, + { + "epoch": 0.1690187340100685, + "grad_norm": 4.050691477754122, + "learning_rate": 2.8505822623081308e-06, + "loss": 0.5944, + "step": 4096 + }, + { + "epoch": 0.16905999834942642, + "grad_norm": 5.813010784848781, + "learning_rate": 2.8504950256416128e-06, + "loss": 0.56, + "step": 4097 + }, + { + "epoch": 
0.16910126268878437, + "grad_norm": 3.433191656647244, + "learning_rate": 2.8504077648518755e-06, + "loss": 0.6004, + "step": 4098 + }, + { + "epoch": 0.16914252702814228, + "grad_norm": 2.6421136014812414, + "learning_rate": 2.8503204799404767e-06, + "loss": 0.5471, + "step": 4099 + }, + { + "epoch": 0.1691837913675002, + "grad_norm": 5.4877857259389025, + "learning_rate": 2.850233170908977e-06, + "loss": 0.5649, + "step": 4100 + }, + { + "epoch": 0.16922505570685814, + "grad_norm": 2.072455383120809, + "learning_rate": 2.8501458377589345e-06, + "loss": 0.5608, + "step": 4101 + }, + { + "epoch": 0.16926632004621606, + "grad_norm": 2.0795362174410883, + "learning_rate": 2.8500584804919095e-06, + "loss": 0.5444, + "step": 4102 + }, + { + "epoch": 0.169307584385574, + "grad_norm": 2.6756339548600243, + "learning_rate": 2.849971099109463e-06, + "loss": 0.5522, + "step": 4103 + }, + { + "epoch": 0.16934884872493192, + "grad_norm": 3.2181637031010295, + "learning_rate": 2.8498836936131558e-06, + "loss": 0.5424, + "step": 4104 + }, + { + "epoch": 0.16939011306428983, + "grad_norm": 3.8498664218046206, + "learning_rate": 2.849796264004549e-06, + "loss": 0.5864, + "step": 4105 + }, + { + "epoch": 0.16943137740364778, + "grad_norm": 2.6390451151848007, + "learning_rate": 2.849708810285204e-06, + "loss": 0.6267, + "step": 4106 + }, + { + "epoch": 0.1694726417430057, + "grad_norm": 3.751752914145357, + "learning_rate": 2.849621332456683e-06, + "loss": 0.4616, + "step": 4107 + }, + { + "epoch": 0.1695139060823636, + "grad_norm": 2.3833775676597364, + "learning_rate": 2.8495338305205493e-06, + "loss": 0.5914, + "step": 4108 + }, + { + "epoch": 0.16955517042172155, + "grad_norm": 4.189355851303513, + "learning_rate": 2.849446304478365e-06, + "loss": 0.5802, + "step": 4109 + }, + { + "epoch": 0.16959643476107947, + "grad_norm": 3.3884889895686103, + "learning_rate": 2.8493587543316937e-06, + "loss": 0.6006, + "step": 4110 + }, + { + "epoch": 0.1696376991004374, + "grad_norm": 8.790947995234866, + "learning_rate": 2.8492711800821e-06, + "loss": 0.585, + "step": 4111 + }, + { + "epoch": 0.16967896343979533, + "grad_norm": 3.640089917364051, + "learning_rate": 2.8491835817311476e-06, + "loss": 0.547, + "step": 4112 + }, + { + "epoch": 0.16972022777915324, + "grad_norm": 3.0985670567773624, + "learning_rate": 2.849095959280401e-06, + "loss": 0.5711, + "step": 4113 + }, + { + "epoch": 0.1697614921185112, + "grad_norm": 7.705414734447043, + "learning_rate": 2.849008312731426e-06, + "loss": 0.5979, + "step": 4114 + }, + { + "epoch": 0.1698027564578691, + "grad_norm": 2.672719733827202, + "learning_rate": 2.8489206420857876e-06, + "loss": 0.5847, + "step": 4115 + }, + { + "epoch": 0.16984402079722705, + "grad_norm": 3.1288443648283324, + "learning_rate": 2.848832947345052e-06, + "loss": 0.5117, + "step": 4116 + }, + { + "epoch": 0.16988528513658496, + "grad_norm": 7.290059940365063, + "learning_rate": 2.8487452285107855e-06, + "loss": 0.5844, + "step": 4117 + }, + { + "epoch": 0.16992654947594288, + "grad_norm": 3.0300231106161117, + "learning_rate": 2.848657485584556e-06, + "loss": 0.5716, + "step": 4118 + }, + { + "epoch": 0.16996781381530082, + "grad_norm": 3.2690385963385826, + "learning_rate": 2.848569718567929e-06, + "loss": 0.5845, + "step": 4119 + }, + { + "epoch": 0.17000907815465874, + "grad_norm": 3.1731634048751536, + "learning_rate": 2.8484819274624737e-06, + "loss": 0.5744, + "step": 4120 + }, + { + "epoch": 0.17005034249401668, + "grad_norm": 2.154520938444159, + "learning_rate": 
2.8483941122697575e-06, + "loss": 0.5463, + "step": 4121 + }, + { + "epoch": 0.1700916068333746, + "grad_norm": 3.0992702660170073, + "learning_rate": 2.8483062729913498e-06, + "loss": 0.6171, + "step": 4122 + }, + { + "epoch": 0.1701328711727325, + "grad_norm": 2.3611612515487272, + "learning_rate": 2.848218409628819e-06, + "loss": 0.5228, + "step": 4123 + }, + { + "epoch": 0.17017413551209046, + "grad_norm": 3.527185276231779, + "learning_rate": 2.8481305221837344e-06, + "loss": 0.5389, + "step": 4124 + }, + { + "epoch": 0.17021539985144837, + "grad_norm": 3.552189596420187, + "learning_rate": 2.8480426106576663e-06, + "loss": 0.5425, + "step": 4125 + }, + { + "epoch": 0.17025666419080632, + "grad_norm": 2.9514296118070256, + "learning_rate": 2.8479546750521845e-06, + "loss": 0.5862, + "step": 4126 + }, + { + "epoch": 0.17029792853016423, + "grad_norm": 5.674173021578533, + "learning_rate": 2.8478667153688608e-06, + "loss": 0.5192, + "step": 4127 + }, + { + "epoch": 0.17033919286952215, + "grad_norm": 7.206527606048022, + "learning_rate": 2.8477787316092654e-06, + "loss": 0.5564, + "step": 4128 + }, + { + "epoch": 0.1703804572088801, + "grad_norm": 1.9087218181343908, + "learning_rate": 2.84769072377497e-06, + "loss": 0.48, + "step": 4129 + }, + { + "epoch": 0.170421721548238, + "grad_norm": 2.7126778807151655, + "learning_rate": 2.847602691867547e-06, + "loss": 0.5249, + "step": 4130 + }, + { + "epoch": 0.17046298588759595, + "grad_norm": 2.9355574072034267, + "learning_rate": 2.847514635888569e-06, + "loss": 0.5436, + "step": 4131 + }, + { + "epoch": 0.17050425022695387, + "grad_norm": 4.7013440632481975, + "learning_rate": 2.8474265558396086e-06, + "loss": 0.6056, + "step": 4132 + }, + { + "epoch": 0.17054551456631178, + "grad_norm": 4.424280336161891, + "learning_rate": 2.8473384517222392e-06, + "loss": 0.5796, + "step": 4133 + }, + { + "epoch": 0.17058677890566973, + "grad_norm": 3.7225351784640384, + "learning_rate": 2.847250323538034e-06, + "loss": 0.5139, + "step": 4134 + }, + { + "epoch": 0.17062804324502764, + "grad_norm": 4.106577998957455, + "learning_rate": 2.847162171288568e-06, + "loss": 0.567, + "step": 4135 + }, + { + "epoch": 0.1706693075843856, + "grad_norm": 2.160830960077592, + "learning_rate": 2.8470739949754157e-06, + "loss": 0.5319, + "step": 4136 + }, + { + "epoch": 0.1707105719237435, + "grad_norm": 3.8792563835572165, + "learning_rate": 2.846985794600152e-06, + "loss": 0.5699, + "step": 4137 + }, + { + "epoch": 0.17075183626310142, + "grad_norm": 2.273765571056806, + "learning_rate": 2.8468975701643524e-06, + "loss": 0.5231, + "step": 4138 + }, + { + "epoch": 0.17079310060245936, + "grad_norm": 3.7982637992445496, + "learning_rate": 2.8468093216695925e-06, + "loss": 0.547, + "step": 4139 + }, + { + "epoch": 0.17083436494181728, + "grad_norm": 2.443408642183658, + "learning_rate": 2.8467210491174494e-06, + "loss": 0.5045, + "step": 4140 + }, + { + "epoch": 0.1708756292811752, + "grad_norm": 2.43449091842518, + "learning_rate": 2.846632752509499e-06, + "loss": 0.6403, + "step": 4141 + }, + { + "epoch": 0.17091689362053314, + "grad_norm": 3.702114306198361, + "learning_rate": 2.846544431847319e-06, + "loss": 0.5431, + "step": 4142 + }, + { + "epoch": 0.17095815795989106, + "grad_norm": 1.746198897534032, + "learning_rate": 2.8464560871324868e-06, + "loss": 0.5301, + "step": 4143 + }, + { + "epoch": 0.170999422299249, + "grad_norm": 3.908358998112028, + "learning_rate": 2.8463677183665806e-06, + "loss": 0.6259, + "step": 4144 + }, + { + "epoch": 
0.17104068663860691, + "grad_norm": 2.9303948806448594, + "learning_rate": 2.8462793255511788e-06, + "loss": 0.5623, + "step": 4145 + }, + { + "epoch": 0.17108195097796483, + "grad_norm": 2.4977924365499065, + "learning_rate": 2.8461909086878606e-06, + "loss": 0.6019, + "step": 4146 + }, + { + "epoch": 0.17112321531732277, + "grad_norm": 22.6020917821618, + "learning_rate": 2.8461024677782046e-06, + "loss": 0.5229, + "step": 4147 + }, + { + "epoch": 0.1711644796566807, + "grad_norm": 4.755628411990504, + "learning_rate": 2.8460140028237913e-06, + "loss": 0.5882, + "step": 4148 + }, + { + "epoch": 0.17120574399603863, + "grad_norm": 3.409451977141713, + "learning_rate": 2.8459255138262013e-06, + "loss": 0.541, + "step": 4149 + }, + { + "epoch": 0.17124700833539655, + "grad_norm": 9.540224276488157, + "learning_rate": 2.845837000787014e-06, + "loss": 0.4994, + "step": 4150 + }, + { + "epoch": 0.17128827267475447, + "grad_norm": 2.501614085741714, + "learning_rate": 2.845748463707811e-06, + "loss": 0.601, + "step": 4151 + }, + { + "epoch": 0.1713295370141124, + "grad_norm": 3.4413089376774244, + "learning_rate": 2.8456599025901746e-06, + "loss": 0.5152, + "step": 4152 + }, + { + "epoch": 0.17137080135347033, + "grad_norm": 1.8377424584327622, + "learning_rate": 2.8455713174356853e-06, + "loss": 0.5073, + "step": 4153 + }, + { + "epoch": 0.17141206569282827, + "grad_norm": 4.25098422253011, + "learning_rate": 2.8454827082459263e-06, + "loss": 0.5674, + "step": 4154 + }, + { + "epoch": 0.17145333003218619, + "grad_norm": 3.5851736407881463, + "learning_rate": 2.8453940750224804e-06, + "loss": 0.5861, + "step": 4155 + }, + { + "epoch": 0.1714945943715441, + "grad_norm": 12.94180172704423, + "learning_rate": 2.845305417766931e-06, + "loss": 0.578, + "step": 4156 + }, + { + "epoch": 0.17153585871090204, + "grad_norm": 3.2860077637141982, + "learning_rate": 2.845216736480861e-06, + "loss": 0.5491, + "step": 4157 + }, + { + "epoch": 0.17157712305025996, + "grad_norm": 4.182375770891037, + "learning_rate": 2.8451280311658548e-06, + "loss": 0.5721, + "step": 4158 + }, + { + "epoch": 0.1716183873896179, + "grad_norm": 2.298863799235707, + "learning_rate": 2.8450393018234973e-06, + "loss": 0.5668, + "step": 4159 + }, + { + "epoch": 0.17165965172897582, + "grad_norm": 3.976025469845142, + "learning_rate": 2.8449505484553726e-06, + "loss": 0.6256, + "step": 4160 + }, + { + "epoch": 0.17170091606833374, + "grad_norm": 2.5240583193154476, + "learning_rate": 2.844861771063067e-06, + "loss": 0.5281, + "step": 4161 + }, + { + "epoch": 0.17174218040769168, + "grad_norm": 2.2491259991869685, + "learning_rate": 2.8447729696481657e-06, + "loss": 0.5083, + "step": 4162 + }, + { + "epoch": 0.1717834447470496, + "grad_norm": 3.8088856928836203, + "learning_rate": 2.844684144212255e-06, + "loss": 0.5218, + "step": 4163 + }, + { + "epoch": 0.17182470908640754, + "grad_norm": 5.869375298212125, + "learning_rate": 2.8445952947569217e-06, + "loss": 0.5792, + "step": 4164 + }, + { + "epoch": 0.17186597342576546, + "grad_norm": 2.612632832882836, + "learning_rate": 2.8445064212837526e-06, + "loss": 0.533, + "step": 4165 + }, + { + "epoch": 0.17190723776512337, + "grad_norm": 4.878355429446382, + "learning_rate": 2.8444175237943356e-06, + "loss": 0.6052, + "step": 4166 + }, + { + "epoch": 0.17194850210448132, + "grad_norm": 2.576222834392815, + "learning_rate": 2.844328602290258e-06, + "loss": 0.5871, + "step": 4167 + }, + { + "epoch": 0.17198976644383923, + "grad_norm": 4.550418640611923, + "learning_rate": 
2.844239656773109e-06, + "loss": 0.6001, + "step": 4168 + }, + { + "epoch": 0.17203103078319715, + "grad_norm": 10.656052231510978, + "learning_rate": 2.8441506872444768e-06, + "loss": 0.6565, + "step": 4169 + }, + { + "epoch": 0.1720722951225551, + "grad_norm": 2.780550305950709, + "learning_rate": 2.844061693705951e-06, + "loss": 0.58, + "step": 4170 + }, + { + "epoch": 0.172113559461913, + "grad_norm": 2.8628110308620727, + "learning_rate": 2.843972676159121e-06, + "loss": 0.5915, + "step": 4171 + }, + { + "epoch": 0.17215482380127095, + "grad_norm": 4.085594334569392, + "learning_rate": 2.8438836346055766e-06, + "loss": 0.5818, + "step": 4172 + }, + { + "epoch": 0.17219608814062887, + "grad_norm": 3.286628048247299, + "learning_rate": 2.8437945690469086e-06, + "loss": 0.6124, + "step": 4173 + }, + { + "epoch": 0.17223735247998678, + "grad_norm": 3.7205325900580233, + "learning_rate": 2.843705479484708e-06, + "loss": 0.5085, + "step": 4174 + }, + { + "epoch": 0.17227861681934473, + "grad_norm": 8.150932749349723, + "learning_rate": 2.8436163659205666e-06, + "loss": 0.6374, + "step": 4175 + }, + { + "epoch": 0.17231988115870264, + "grad_norm": 2.377385496783008, + "learning_rate": 2.8435272283560753e-06, + "loss": 0.5765, + "step": 4176 + }, + { + "epoch": 0.1723611454980606, + "grad_norm": 2.952134337054589, + "learning_rate": 2.8434380667928266e-06, + "loss": 0.6062, + "step": 4177 + }, + { + "epoch": 0.1724024098374185, + "grad_norm": 2.84296928469599, + "learning_rate": 2.8433488812324133e-06, + "loss": 0.5384, + "step": 4178 + }, + { + "epoch": 0.17244367417677642, + "grad_norm": 2.4578453803777274, + "learning_rate": 2.843259671676428e-06, + "loss": 0.5747, + "step": 4179 + }, + { + "epoch": 0.17248493851613436, + "grad_norm": 7.168117177557953, + "learning_rate": 2.8431704381264657e-06, + "loss": 0.5801, + "step": 4180 + }, + { + "epoch": 0.17252620285549228, + "grad_norm": 2.7877574952707285, + "learning_rate": 2.8430811805841184e-06, + "loss": 0.5801, + "step": 4181 + }, + { + "epoch": 0.17256746719485022, + "grad_norm": 2.612506615958705, + "learning_rate": 2.8429918990509817e-06, + "loss": 0.5582, + "step": 4182 + }, + { + "epoch": 0.17260873153420814, + "grad_norm": 5.405511352076598, + "learning_rate": 2.84290259352865e-06, + "loss": 0.5592, + "step": 4183 + }, + { + "epoch": 0.17264999587356605, + "grad_norm": 2.436532633862299, + "learning_rate": 2.8428132640187185e-06, + "loss": 0.5725, + "step": 4184 + }, + { + "epoch": 0.172691260212924, + "grad_norm": 3.1915406554084145, + "learning_rate": 2.842723910522783e-06, + "loss": 0.5768, + "step": 4185 + }, + { + "epoch": 0.1727325245522819, + "grad_norm": 4.0719551004725005, + "learning_rate": 2.8426345330424392e-06, + "loss": 0.5617, + "step": 4186 + }, + { + "epoch": 0.17277378889163986, + "grad_norm": 4.899595505407739, + "learning_rate": 2.842545131579284e-06, + "loss": 0.5497, + "step": 4187 + }, + { + "epoch": 0.17281505323099777, + "grad_norm": 3.3516502420986214, + "learning_rate": 2.8424557061349145e-06, + "loss": 0.5438, + "step": 4188 + }, + { + "epoch": 0.1728563175703557, + "grad_norm": 2.7204349463163995, + "learning_rate": 2.842366256710927e-06, + "loss": 0.5792, + "step": 4189 + }, + { + "epoch": 0.17289758190971363, + "grad_norm": 3.604591294972357, + "learning_rate": 2.842276783308921e-06, + "loss": 0.5617, + "step": 4190 + }, + { + "epoch": 0.17293884624907155, + "grad_norm": 5.212763390379829, + "learning_rate": 2.8421872859304937e-06, + "loss": 0.5638, + "step": 4191 + }, + { + "epoch": 
0.1729801105884295, + "grad_norm": 2.4545825412126843, + "learning_rate": 2.8420977645772434e-06, + "loss": 0.5952, + "step": 4192 + }, + { + "epoch": 0.1730213749277874, + "grad_norm": 2.2366466965702014, + "learning_rate": 2.8420082192507695e-06, + "loss": 0.5612, + "step": 4193 + }, + { + "epoch": 0.17306263926714532, + "grad_norm": 2.544164490909469, + "learning_rate": 2.841918649952672e-06, + "loss": 0.5582, + "step": 4194 + }, + { + "epoch": 0.17310390360650327, + "grad_norm": 4.97941114603406, + "learning_rate": 2.8418290566845504e-06, + "loss": 0.536, + "step": 4195 + }, + { + "epoch": 0.17314516794586118, + "grad_norm": 2.702214930714659, + "learning_rate": 2.841739439448005e-06, + "loss": 0.5132, + "step": 4196 + }, + { + "epoch": 0.17318643228521913, + "grad_norm": 6.0386766799387095, + "learning_rate": 2.8416497982446367e-06, + "loss": 0.6103, + "step": 4197 + }, + { + "epoch": 0.17322769662457704, + "grad_norm": 3.6984959685153336, + "learning_rate": 2.8415601330760468e-06, + "loss": 0.5488, + "step": 4198 + }, + { + "epoch": 0.17326896096393496, + "grad_norm": 4.41789651252573, + "learning_rate": 2.841470443943837e-06, + "loss": 0.5398, + "step": 4199 + }, + { + "epoch": 0.1733102253032929, + "grad_norm": 4.412563025578881, + "learning_rate": 2.841380730849609e-06, + "loss": 0.585, + "step": 4200 + }, + { + "epoch": 0.17335148964265082, + "grad_norm": 2.670627016403631, + "learning_rate": 2.841290993794965e-06, + "loss": 0.6029, + "step": 4201 + }, + { + "epoch": 0.17339275398200873, + "grad_norm": 4.872734342864011, + "learning_rate": 2.8412012327815093e-06, + "loss": 0.5383, + "step": 4202 + }, + { + "epoch": 0.17343401832136668, + "grad_norm": 4.500201444170803, + "learning_rate": 2.8411114478108442e-06, + "loss": 0.54, + "step": 4203 + }, + { + "epoch": 0.1734752826607246, + "grad_norm": 3.0904208420613974, + "learning_rate": 2.841021638884574e-06, + "loss": 0.5018, + "step": 4204 + }, + { + "epoch": 0.17351654700008254, + "grad_norm": 8.671646715801284, + "learning_rate": 2.840931806004302e-06, + "loss": 0.5804, + "step": 4205 + }, + { + "epoch": 0.17355781133944045, + "grad_norm": 1.936683927639777, + "learning_rate": 2.8408419491716334e-06, + "loss": 0.5719, + "step": 4206 + }, + { + "epoch": 0.17359907567879837, + "grad_norm": 21.115938467075956, + "learning_rate": 2.8407520683881733e-06, + "loss": 0.565, + "step": 4207 + }, + { + "epoch": 0.17364034001815631, + "grad_norm": 2.053744066560799, + "learning_rate": 2.840662163655528e-06, + "loss": 0.5506, + "step": 4208 + }, + { + "epoch": 0.17368160435751423, + "grad_norm": 4.109414362313344, + "learning_rate": 2.8405722349753015e-06, + "loss": 0.5401, + "step": 4209 + }, + { + "epoch": 0.17372286869687217, + "grad_norm": 3.0539910481544346, + "learning_rate": 2.840482282349102e-06, + "loss": 0.5609, + "step": 4210 + }, + { + "epoch": 0.1737641330362301, + "grad_norm": 2.414473142939482, + "learning_rate": 2.840392305778536e-06, + "loss": 0.6034, + "step": 4211 + }, + { + "epoch": 0.173805397375588, + "grad_norm": 4.414763707591651, + "learning_rate": 2.8403023052652093e-06, + "loss": 0.5563, + "step": 4212 + }, + { + "epoch": 0.17384666171494595, + "grad_norm": 4.204935961594683, + "learning_rate": 2.8402122808107312e-06, + "loss": 0.563, + "step": 4213 + }, + { + "epoch": 0.17388792605430387, + "grad_norm": 3.8730572773501244, + "learning_rate": 2.8401222324167088e-06, + "loss": 0.6131, + "step": 4214 + }, + { + "epoch": 0.1739291903936618, + "grad_norm": 2.7069236665393928, + "learning_rate": 2.840032160084751e-06, + 
"loss": 0.5294, + "step": 4215 + }, + { + "epoch": 0.17397045473301972, + "grad_norm": 8.444790746796727, + "learning_rate": 2.8399420638164664e-06, + "loss": 0.6226, + "step": 4216 + }, + { + "epoch": 0.17401171907237764, + "grad_norm": 3.0311925830776714, + "learning_rate": 2.8398519436134644e-06, + "loss": 0.6507, + "step": 4217 + }, + { + "epoch": 0.17405298341173558, + "grad_norm": 14.028517417976365, + "learning_rate": 2.839761799477355e-06, + "loss": 0.5318, + "step": 4218 + }, + { + "epoch": 0.1740942477510935, + "grad_norm": 9.168983222892185, + "learning_rate": 2.839671631409748e-06, + "loss": 0.5414, + "step": 4219 + }, + { + "epoch": 0.17413551209045144, + "grad_norm": 4.401117397824573, + "learning_rate": 2.839581439412255e-06, + "loss": 0.5305, + "step": 4220 + }, + { + "epoch": 0.17417677642980936, + "grad_norm": 4.034189074826887, + "learning_rate": 2.839491223486486e-06, + "loss": 0.5653, + "step": 4221 + }, + { + "epoch": 0.17421804076916728, + "grad_norm": 2.82220249024075, + "learning_rate": 2.839400983634053e-06, + "loss": 0.5744, + "step": 4222 + }, + { + "epoch": 0.17425930510852522, + "grad_norm": 2.995194529842909, + "learning_rate": 2.8393107198565677e-06, + "loss": 0.5447, + "step": 4223 + }, + { + "epoch": 0.17430056944788314, + "grad_norm": 9.705046380666854, + "learning_rate": 2.8392204321556426e-06, + "loss": 0.6312, + "step": 4224 + }, + { + "epoch": 0.17434183378724108, + "grad_norm": 2.5077043958786365, + "learning_rate": 2.83913012053289e-06, + "loss": 0.5014, + "step": 4225 + }, + { + "epoch": 0.174383098126599, + "grad_norm": 10.315590623653996, + "learning_rate": 2.8390397849899238e-06, + "loss": 0.6003, + "step": 4226 + }, + { + "epoch": 0.1744243624659569, + "grad_norm": 2.677097683781166, + "learning_rate": 2.8389494255283568e-06, + "loss": 0.5862, + "step": 4227 + }, + { + "epoch": 0.17446562680531486, + "grad_norm": 3.775040722764965, + "learning_rate": 2.8388590421498036e-06, + "loss": 0.5609, + "step": 4228 + }, + { + "epoch": 0.17450689114467277, + "grad_norm": 2.313280117492642, + "learning_rate": 2.838768634855879e-06, + "loss": 0.5029, + "step": 4229 + }, + { + "epoch": 0.1745481554840307, + "grad_norm": 2.05556316698294, + "learning_rate": 2.8386782036481977e-06, + "loss": 0.5991, + "step": 4230 + }, + { + "epoch": 0.17458941982338863, + "grad_norm": 2.7053477656213865, + "learning_rate": 2.8385877485283743e-06, + "loss": 0.5822, + "step": 4231 + }, + { + "epoch": 0.17463068416274655, + "grad_norm": 5.092533741658675, + "learning_rate": 2.8384972694980255e-06, + "loss": 0.5574, + "step": 4232 + }, + { + "epoch": 0.1746719485021045, + "grad_norm": 3.3981004712662877, + "learning_rate": 2.8384067665587664e-06, + "loss": 0.5702, + "step": 4233 + }, + { + "epoch": 0.1747132128414624, + "grad_norm": 10.904014861283262, + "learning_rate": 2.8383162397122147e-06, + "loss": 0.6084, + "step": 4234 + }, + { + "epoch": 0.17475447718082032, + "grad_norm": 3.077991346119615, + "learning_rate": 2.838225688959987e-06, + "loss": 0.5961, + "step": 4235 + }, + { + "epoch": 0.17479574152017827, + "grad_norm": 2.317271992429847, + "learning_rate": 2.838135114303701e-06, + "loss": 0.5622, + "step": 4236 + }, + { + "epoch": 0.17483700585953618, + "grad_norm": 5.764074630497667, + "learning_rate": 2.838044515744974e-06, + "loss": 0.5224, + "step": 4237 + }, + { + "epoch": 0.17487827019889413, + "grad_norm": 2.2428215722217284, + "learning_rate": 2.837953893285425e-06, + "loss": 0.5751, + "step": 4238 + }, + { + "epoch": 0.17491953453825204, + "grad_norm": 
3.3057427112174405, + "learning_rate": 2.837863246926672e-06, + "loss": 0.5678, + "step": 4239 + }, + { + "epoch": 0.17496079887760996, + "grad_norm": 3.0791363319937237, + "learning_rate": 2.837772576670335e-06, + "loss": 0.5241, + "step": 4240 + }, + { + "epoch": 0.1750020632169679, + "grad_norm": 3.964348781276152, + "learning_rate": 2.837681882518033e-06, + "loss": 0.5265, + "step": 4241 + }, + { + "epoch": 0.17504332755632582, + "grad_norm": 4.995989636158882, + "learning_rate": 2.837591164471386e-06, + "loss": 0.5842, + "step": 4242 + }, + { + "epoch": 0.17508459189568376, + "grad_norm": 1.9639804371519405, + "learning_rate": 2.837500422532015e-06, + "loss": 0.5438, + "step": 4243 + }, + { + "epoch": 0.17512585623504168, + "grad_norm": 2.297139986896118, + "learning_rate": 2.8374096567015402e-06, + "loss": 0.553, + "step": 4244 + }, + { + "epoch": 0.1751671205743996, + "grad_norm": 3.742734864353081, + "learning_rate": 2.8373188669815837e-06, + "loss": 0.5805, + "step": 4245 + }, + { + "epoch": 0.17520838491375754, + "grad_norm": 7.373198330391066, + "learning_rate": 2.837228053373766e-06, + "loss": 0.4823, + "step": 4246 + }, + { + "epoch": 0.17524964925311545, + "grad_norm": 3.251559138348081, + "learning_rate": 2.837137215879711e-06, + "loss": 0.5766, + "step": 4247 + }, + { + "epoch": 0.1752909135924734, + "grad_norm": 2.179363025648642, + "learning_rate": 2.83704635450104e-06, + "loss": 0.5112, + "step": 4248 + }, + { + "epoch": 0.1753321779318313, + "grad_norm": 2.6356509996577095, + "learning_rate": 2.836955469239376e-06, + "loss": 0.582, + "step": 4249 + }, + { + "epoch": 0.17537344227118923, + "grad_norm": 4.826123672862052, + "learning_rate": 2.836864560096343e-06, + "loss": 0.5422, + "step": 4250 + }, + { + "epoch": 0.17541470661054717, + "grad_norm": 3.922632802640128, + "learning_rate": 2.8367736270735646e-06, + "loss": 0.5845, + "step": 4251 + }, + { + "epoch": 0.1754559709499051, + "grad_norm": 2.6731773314005314, + "learning_rate": 2.836682670172665e-06, + "loss": 0.5841, + "step": 4252 + }, + { + "epoch": 0.17549723528926303, + "grad_norm": 2.9602303736387174, + "learning_rate": 2.8365916893952695e-06, + "loss": 0.536, + "step": 4253 + }, + { + "epoch": 0.17553849962862095, + "grad_norm": 2.6347798829054474, + "learning_rate": 2.8365006847430023e-06, + "loss": 0.5395, + "step": 4254 + }, + { + "epoch": 0.17557976396797886, + "grad_norm": 3.329814598248019, + "learning_rate": 2.8364096562174896e-06, + "loss": 0.5904, + "step": 4255 + }, + { + "epoch": 0.1756210283073368, + "grad_norm": 4.3635142923296115, + "learning_rate": 2.836318603820357e-06, + "loss": 0.6282, + "step": 4256 + }, + { + "epoch": 0.17566229264669472, + "grad_norm": 3.8493527140007107, + "learning_rate": 2.8362275275532316e-06, + "loss": 0.5114, + "step": 4257 + }, + { + "epoch": 0.17570355698605267, + "grad_norm": 2.6420520256533893, + "learning_rate": 2.8361364274177396e-06, + "loss": 0.5607, + "step": 4258 + }, + { + "epoch": 0.17574482132541058, + "grad_norm": 2.0629547545795663, + "learning_rate": 2.8360453034155088e-06, + "loss": 0.5306, + "step": 4259 + }, + { + "epoch": 0.1757860856647685, + "grad_norm": 14.208518401167094, + "learning_rate": 2.8359541555481665e-06, + "loss": 0.5994, + "step": 4260 + }, + { + "epoch": 0.17582735000412644, + "grad_norm": 2.929995479112457, + "learning_rate": 2.835862983817341e-06, + "loss": 0.5371, + "step": 4261 + }, + { + "epoch": 0.17586861434348436, + "grad_norm": 3.6061289370680853, + "learning_rate": 2.8357717882246604e-06, + "loss": 0.6012, + "step": 
4262 + }, + { + "epoch": 0.17590987868284227, + "grad_norm": 2.8967247185091387, + "learning_rate": 2.835680568771754e-06, + "loss": 0.5583, + "step": 4263 + }, + { + "epoch": 0.17595114302220022, + "grad_norm": 3.646880199929476, + "learning_rate": 2.835589325460252e-06, + "loss": 0.5953, + "step": 4264 + }, + { + "epoch": 0.17599240736155813, + "grad_norm": 4.432372614459734, + "learning_rate": 2.835498058291783e-06, + "loss": 0.5526, + "step": 4265 + }, + { + "epoch": 0.17603367170091608, + "grad_norm": 2.8674304490748557, + "learning_rate": 2.8354067672679776e-06, + "loss": 0.563, + "step": 4266 + }, + { + "epoch": 0.176074936040274, + "grad_norm": 2.9233837310694146, + "learning_rate": 2.8353154523904673e-06, + "loss": 0.5557, + "step": 4267 + }, + { + "epoch": 0.1761162003796319, + "grad_norm": 2.712642599858171, + "learning_rate": 2.835224113660882e-06, + "loss": 0.5779, + "step": 4268 + }, + { + "epoch": 0.17615746471898985, + "grad_norm": 2.9359189282269127, + "learning_rate": 2.835132751080854e-06, + "loss": 0.5616, + "step": 4269 + }, + { + "epoch": 0.17619872905834777, + "grad_norm": 8.618556005851111, + "learning_rate": 2.835041364652015e-06, + "loss": 0.5364, + "step": 4270 + }, + { + "epoch": 0.1762399933977057, + "grad_norm": 3.021629321001134, + "learning_rate": 2.834949954375998e-06, + "loss": 0.5724, + "step": 4271 + }, + { + "epoch": 0.17628125773706363, + "grad_norm": 2.7609976820852307, + "learning_rate": 2.8348585202544348e-06, + "loss": 0.5189, + "step": 4272 + }, + { + "epoch": 0.17632252207642155, + "grad_norm": 4.045530647176396, + "learning_rate": 2.834767062288959e-06, + "loss": 0.6078, + "step": 4273 + }, + { + "epoch": 0.1763637864157795, + "grad_norm": 3.6432875469813806, + "learning_rate": 2.8346755804812046e-06, + "loss": 0.6138, + "step": 4274 + }, + { + "epoch": 0.1764050507551374, + "grad_norm": 3.3015503434422437, + "learning_rate": 2.8345840748328053e-06, + "loss": 0.541, + "step": 4275 + }, + { + "epoch": 0.17644631509449535, + "grad_norm": 1.9628065905293544, + "learning_rate": 2.834492545345396e-06, + "loss": 0.5657, + "step": 4276 + }, + { + "epoch": 0.17648757943385326, + "grad_norm": 2.9081151818122226, + "learning_rate": 2.8344009920206114e-06, + "loss": 0.5808, + "step": 4277 + }, + { + "epoch": 0.17652884377321118, + "grad_norm": 2.7495699760768377, + "learning_rate": 2.8343094148600867e-06, + "loss": 0.5817, + "step": 4278 + }, + { + "epoch": 0.17657010811256912, + "grad_norm": 2.2239770285038607, + "learning_rate": 2.834217813865458e-06, + "loss": 0.5504, + "step": 4279 + }, + { + "epoch": 0.17661137245192704, + "grad_norm": 6.957924613470844, + "learning_rate": 2.834126189038362e-06, + "loss": 0.5696, + "step": 4280 + }, + { + "epoch": 0.17665263679128498, + "grad_norm": 9.40511167831785, + "learning_rate": 2.834034540380434e-06, + "loss": 0.6448, + "step": 4281 + }, + { + "epoch": 0.1766939011306429, + "grad_norm": 2.3324502545542316, + "learning_rate": 2.833942867893312e-06, + "loss": 0.5489, + "step": 4282 + }, + { + "epoch": 0.17673516547000082, + "grad_norm": 2.7251828231231205, + "learning_rate": 2.833851171578634e-06, + "loss": 0.5413, + "step": 4283 + }, + { + "epoch": 0.17677642980935876, + "grad_norm": 2.5975545867099314, + "learning_rate": 2.833759451438037e-06, + "loss": 0.5441, + "step": 4284 + }, + { + "epoch": 0.17681769414871668, + "grad_norm": 3.661001351430918, + "learning_rate": 2.8336677074731593e-06, + "loss": 0.5408, + "step": 4285 + }, + { + "epoch": 0.17685895848807462, + "grad_norm": 2.541798426671282, + 
"learning_rate": 2.83357593968564e-06, + "loss": 0.5529, + "step": 4286 + }, + { + "epoch": 0.17690022282743254, + "grad_norm": 2.440791157558994, + "learning_rate": 2.8334841480771187e-06, + "loss": 0.5721, + "step": 4287 + }, + { + "epoch": 0.17694148716679045, + "grad_norm": 3.816919351667228, + "learning_rate": 2.8333923326492346e-06, + "loss": 0.6069, + "step": 4288 + }, + { + "epoch": 0.1769827515061484, + "grad_norm": 3.118713632012606, + "learning_rate": 2.8333004934036278e-06, + "loss": 0.5531, + "step": 4289 + }, + { + "epoch": 0.1770240158455063, + "grad_norm": 2.6311094265383037, + "learning_rate": 2.8332086303419387e-06, + "loss": 0.5644, + "step": 4290 + }, + { + "epoch": 0.17706528018486423, + "grad_norm": 5.380906994610784, + "learning_rate": 2.8331167434658083e-06, + "loss": 0.5623, + "step": 4291 + }, + { + "epoch": 0.17710654452422217, + "grad_norm": 2.5747520824857903, + "learning_rate": 2.833024832776878e-06, + "loss": 0.5453, + "step": 4292 + }, + { + "epoch": 0.1771478088635801, + "grad_norm": 4.005251658023458, + "learning_rate": 2.8329328982767893e-06, + "loss": 0.5635, + "step": 4293 + }, + { + "epoch": 0.17718907320293803, + "grad_norm": 5.039400721607574, + "learning_rate": 2.8328409399671843e-06, + "loss": 0.5873, + "step": 4294 + }, + { + "epoch": 0.17723033754229595, + "grad_norm": 4.435490087023916, + "learning_rate": 2.8327489578497066e-06, + "loss": 0.5607, + "step": 4295 + }, + { + "epoch": 0.17727160188165386, + "grad_norm": 2.9508569237210733, + "learning_rate": 2.832656951925998e-06, + "loss": 0.5172, + "step": 4296 + }, + { + "epoch": 0.1773128662210118, + "grad_norm": 3.1938516837430377, + "learning_rate": 2.8325649221977024e-06, + "loss": 0.5406, + "step": 4297 + }, + { + "epoch": 0.17735413056036972, + "grad_norm": 7.909637723786934, + "learning_rate": 2.832472868666464e-06, + "loss": 0.5808, + "step": 4298 + }, + { + "epoch": 0.17739539489972767, + "grad_norm": 5.430782559626018, + "learning_rate": 2.8323807913339263e-06, + "loss": 0.5349, + "step": 4299 + }, + { + "epoch": 0.17743665923908558, + "grad_norm": 4.53582190007908, + "learning_rate": 2.832288690201735e-06, + "loss": 0.5531, + "step": 4300 + }, + { + "epoch": 0.1774779235784435, + "grad_norm": 2.927650437957323, + "learning_rate": 2.832196565271535e-06, + "loss": 0.5642, + "step": 4301 + }, + { + "epoch": 0.17751918791780144, + "grad_norm": 2.129083676675933, + "learning_rate": 2.832104416544971e-06, + "loss": 0.5548, + "step": 4302 + }, + { + "epoch": 0.17756045225715936, + "grad_norm": 2.4986952745529973, + "learning_rate": 2.8320122440236903e-06, + "loss": 0.5946, + "step": 4303 + }, + { + "epoch": 0.1776017165965173, + "grad_norm": 2.977740755548936, + "learning_rate": 2.8319200477093383e-06, + "loss": 0.4931, + "step": 4304 + }, + { + "epoch": 0.17764298093587522, + "grad_norm": 2.8834829057219378, + "learning_rate": 2.8318278276035626e-06, + "loss": 0.5255, + "step": 4305 + }, + { + "epoch": 0.17768424527523313, + "grad_norm": 2.7853694530086925, + "learning_rate": 2.8317355837080106e-06, + "loss": 0.4919, + "step": 4306 + }, + { + "epoch": 0.17772550961459108, + "grad_norm": 3.977889927675248, + "learning_rate": 2.831643316024329e-06, + "loss": 0.5184, + "step": 4307 + }, + { + "epoch": 0.177766773953949, + "grad_norm": 30.54599244861868, + "learning_rate": 2.8315510245541666e-06, + "loss": 0.5259, + "step": 4308 + }, + { + "epoch": 0.17780803829330694, + "grad_norm": 2.3222856125285207, + "learning_rate": 2.831458709299172e-06, + "loss": 0.5208, + "step": 4309 + }, + { + 
"epoch": 0.17784930263266485, + "grad_norm": 2.781633569204328, + "learning_rate": 2.8313663702609936e-06, + "loss": 0.5179, + "step": 4310 + }, + { + "epoch": 0.17789056697202277, + "grad_norm": 3.7730540202036327, + "learning_rate": 2.831274007441282e-06, + "loss": 0.5757, + "step": 4311 + }, + { + "epoch": 0.1779318313113807, + "grad_norm": 4.0532137080324855, + "learning_rate": 2.831181620841686e-06, + "loss": 0.5549, + "step": 4312 + }, + { + "epoch": 0.17797309565073863, + "grad_norm": 2.6710587468583915, + "learning_rate": 2.8310892104638563e-06, + "loss": 0.5558, + "step": 4313 + }, + { + "epoch": 0.17801435999009657, + "grad_norm": 2.361989130171591, + "learning_rate": 2.830996776309443e-06, + "loss": 0.5528, + "step": 4314 + }, + { + "epoch": 0.1780556243294545, + "grad_norm": 2.1223457518136986, + "learning_rate": 2.8309043183800976e-06, + "loss": 0.5558, + "step": 4315 + }, + { + "epoch": 0.1780968886688124, + "grad_norm": 3.3082647442509354, + "learning_rate": 2.830811836677472e-06, + "loss": 0.5221, + "step": 4316 + }, + { + "epoch": 0.17813815300817035, + "grad_norm": 2.6717006976869464, + "learning_rate": 2.8307193312032174e-06, + "loss": 0.5814, + "step": 4317 + }, + { + "epoch": 0.17817941734752826, + "grad_norm": 3.0442909541351773, + "learning_rate": 2.8306268019589867e-06, + "loss": 0.5931, + "step": 4318 + }, + { + "epoch": 0.1782206816868862, + "grad_norm": 2.804207091083024, + "learning_rate": 2.830534248946433e-06, + "loss": 0.619, + "step": 4319 + }, + { + "epoch": 0.17826194602624412, + "grad_norm": 2.663735747515061, + "learning_rate": 2.830441672167209e-06, + "loss": 0.6006, + "step": 4320 + }, + { + "epoch": 0.17830321036560204, + "grad_norm": 2.378101601148916, + "learning_rate": 2.8303490716229686e-06, + "loss": 0.5505, + "step": 4321 + }, + { + "epoch": 0.17834447470495998, + "grad_norm": 3.8267710686990766, + "learning_rate": 2.8302564473153655e-06, + "loss": 0.5692, + "step": 4322 + }, + { + "epoch": 0.1783857390443179, + "grad_norm": 8.754006458033073, + "learning_rate": 2.8301637992460543e-06, + "loss": 0.5361, + "step": 4323 + }, + { + "epoch": 0.17842700338367581, + "grad_norm": 2.771454120929935, + "learning_rate": 2.8300711274166904e-06, + "loss": 0.5539, + "step": 4324 + }, + { + "epoch": 0.17846826772303376, + "grad_norm": 3.354480820529135, + "learning_rate": 2.8299784318289287e-06, + "loss": 0.5537, + "step": 4325 + }, + { + "epoch": 0.17850953206239167, + "grad_norm": 2.7596141331760604, + "learning_rate": 2.829885712484425e-06, + "loss": 0.5047, + "step": 4326 + }, + { + "epoch": 0.17855079640174962, + "grad_norm": 3.227996900284203, + "learning_rate": 2.829792969384836e-06, + "loss": 0.5362, + "step": 4327 + }, + { + "epoch": 0.17859206074110753, + "grad_norm": 2.5122027721498053, + "learning_rate": 2.8297002025318176e-06, + "loss": 0.5091, + "step": 4328 + }, + { + "epoch": 0.17863332508046545, + "grad_norm": 3.5749287981012423, + "learning_rate": 2.8296074119270274e-06, + "loss": 0.5872, + "step": 4329 + }, + { + "epoch": 0.1786745894198234, + "grad_norm": 2.7287326157927376, + "learning_rate": 2.8295145975721226e-06, + "loss": 0.531, + "step": 4330 + }, + { + "epoch": 0.1787158537591813, + "grad_norm": 2.6901404494174015, + "learning_rate": 2.829421759468761e-06, + "loss": 0.5846, + "step": 4331 + }, + { + "epoch": 0.17875711809853925, + "grad_norm": 2.7695805198597725, + "learning_rate": 2.8293288976186007e-06, + "loss": 0.5699, + "step": 4332 + }, + { + "epoch": 0.17879838243789717, + "grad_norm": 3.0135030661895463, + "learning_rate": 
2.8292360120233016e-06, + "loss": 0.5485, + "step": 4333 + }, + { + "epoch": 0.17883964677725508, + "grad_norm": 2.0941832646495917, + "learning_rate": 2.8291431026845216e-06, + "loss": 0.5648, + "step": 4334 + }, + { + "epoch": 0.17888091111661303, + "grad_norm": 2.826945539230287, + "learning_rate": 2.8290501696039206e-06, + "loss": 0.526, + "step": 4335 + }, + { + "epoch": 0.17892217545597094, + "grad_norm": 4.083595221761225, + "learning_rate": 2.8289572127831596e-06, + "loss": 0.5773, + "step": 4336 + }, + { + "epoch": 0.1789634397953289, + "grad_norm": 3.225298139704099, + "learning_rate": 2.8288642322238975e-06, + "loss": 0.5629, + "step": 4337 + }, + { + "epoch": 0.1790047041346868, + "grad_norm": 5.53844207631356, + "learning_rate": 2.828771227927796e-06, + "loss": 0.5982, + "step": 4338 + }, + { + "epoch": 0.17904596847404472, + "grad_norm": 6.456412050560577, + "learning_rate": 2.828678199896517e-06, + "loss": 0.562, + "step": 4339 + }, + { + "epoch": 0.17908723281340266, + "grad_norm": 2.034271030678036, + "learning_rate": 2.828585148131721e-06, + "loss": 0.5504, + "step": 4340 + }, + { + "epoch": 0.17912849715276058, + "grad_norm": 7.052799356836, + "learning_rate": 2.8284920726350703e-06, + "loss": 0.5558, + "step": 4341 + }, + { + "epoch": 0.17916976149211852, + "grad_norm": 2.9835323657704573, + "learning_rate": 2.8283989734082282e-06, + "loss": 0.5892, + "step": 4342 + }, + { + "epoch": 0.17921102583147644, + "grad_norm": 4.601209698799987, + "learning_rate": 2.828305850452857e-06, + "loss": 0.5974, + "step": 4343 + }, + { + "epoch": 0.17925229017083436, + "grad_norm": 2.32197319874585, + "learning_rate": 2.8282127037706207e-06, + "loss": 0.5694, + "step": 4344 + }, + { + "epoch": 0.1792935545101923, + "grad_norm": 4.628232360835202, + "learning_rate": 2.828119533363183e-06, + "loss": 0.532, + "step": 4345 + }, + { + "epoch": 0.17933481884955021, + "grad_norm": 2.2799711422385815, + "learning_rate": 2.8280263392322075e-06, + "loss": 0.5436, + "step": 4346 + }, + { + "epoch": 0.17937608318890816, + "grad_norm": 6.529807777372378, + "learning_rate": 2.8279331213793596e-06, + "loss": 0.5297, + "step": 4347 + }, + { + "epoch": 0.17941734752826607, + "grad_norm": 3.0935839459487515, + "learning_rate": 2.8278398798063044e-06, + "loss": 0.5547, + "step": 4348 + }, + { + "epoch": 0.179458611867624, + "grad_norm": 3.31629310337032, + "learning_rate": 2.8277466145147067e-06, + "loss": 0.6441, + "step": 4349 + }, + { + "epoch": 0.17949987620698193, + "grad_norm": 2.5647544200797694, + "learning_rate": 2.827653325506233e-06, + "loss": 0.5642, + "step": 4350 + }, + { + "epoch": 0.17954114054633985, + "grad_norm": 2.5416544665023997, + "learning_rate": 2.82756001278255e-06, + "loss": 0.5485, + "step": 4351 + }, + { + "epoch": 0.17958240488569777, + "grad_norm": 3.5836972029957055, + "learning_rate": 2.827466676345324e-06, + "loss": 0.5327, + "step": 4352 + }, + { + "epoch": 0.1796236692250557, + "grad_norm": 5.408504175732934, + "learning_rate": 2.8273733161962224e-06, + "loss": 0.5489, + "step": 4353 + }, + { + "epoch": 0.17966493356441363, + "grad_norm": 4.538963776262559, + "learning_rate": 2.8272799323369125e-06, + "loss": 0.5322, + "step": 4354 + }, + { + "epoch": 0.17970619790377157, + "grad_norm": 2.6218781550269186, + "learning_rate": 2.827186524769063e-06, + "loss": 0.5726, + "step": 4355 + }, + { + "epoch": 0.17974746224312949, + "grad_norm": 2.722636803227478, + "learning_rate": 2.8270930934943422e-06, + "loss": 0.5631, + "step": 4356 + }, + { + "epoch": 0.1797887265824874, + 
"grad_norm": 3.179433590219043, + "learning_rate": 2.8269996385144183e-06, + "loss": 0.5135, + "step": 4357 + }, + { + "epoch": 0.17982999092184535, + "grad_norm": 2.6822718705104185, + "learning_rate": 2.8269061598309618e-06, + "loss": 0.5694, + "step": 4358 + }, + { + "epoch": 0.17987125526120326, + "grad_norm": 2.8263627459345906, + "learning_rate": 2.8268126574456412e-06, + "loss": 0.5935, + "step": 4359 + }, + { + "epoch": 0.1799125196005612, + "grad_norm": 13.79534219040872, + "learning_rate": 2.826719131360128e-06, + "loss": 0.6048, + "step": 4360 + }, + { + "epoch": 0.17995378393991912, + "grad_norm": 7.27189966538425, + "learning_rate": 2.8266255815760918e-06, + "loss": 0.5843, + "step": 4361 + }, + { + "epoch": 0.17999504827927704, + "grad_norm": 2.2230662091876097, + "learning_rate": 2.826532008095204e-06, + "loss": 0.532, + "step": 4362 + }, + { + "epoch": 0.18003631261863498, + "grad_norm": 3.661465663215006, + "learning_rate": 2.8264384109191366e-06, + "loss": 0.6028, + "step": 4363 + }, + { + "epoch": 0.1800775769579929, + "grad_norm": 3.5244466202315037, + "learning_rate": 2.8263447900495603e-06, + "loss": 0.5477, + "step": 4364 + }, + { + "epoch": 0.18011884129735084, + "grad_norm": 4.82270150091494, + "learning_rate": 2.826251145488148e-06, + "loss": 0.5185, + "step": 4365 + }, + { + "epoch": 0.18016010563670876, + "grad_norm": 3.6591582834365592, + "learning_rate": 2.826157477236573e-06, + "loss": 0.5447, + "step": 4366 + }, + { + "epoch": 0.18020136997606667, + "grad_norm": 2.959090228714335, + "learning_rate": 2.826063785296508e-06, + "loss": 0.592, + "step": 4367 + }, + { + "epoch": 0.18024263431542462, + "grad_norm": 7.384153701202738, + "learning_rate": 2.8259700696696256e-06, + "loss": 0.6274, + "step": 4368 + }, + { + "epoch": 0.18028389865478253, + "grad_norm": 3.3877436086379236, + "learning_rate": 2.825876330357601e-06, + "loss": 0.5747, + "step": 4369 + }, + { + "epoch": 0.18032516299414048, + "grad_norm": 2.8113706806301506, + "learning_rate": 2.825782567362109e-06, + "loss": 0.4947, + "step": 4370 + }, + { + "epoch": 0.1803664273334984, + "grad_norm": 3.280817853310195, + "learning_rate": 2.825688780684823e-06, + "loss": 0.5457, + "step": 4371 + }, + { + "epoch": 0.1804076916728563, + "grad_norm": 3.0202581988082535, + "learning_rate": 2.8255949703274196e-06, + "loss": 0.5779, + "step": 4372 + }, + { + "epoch": 0.18044895601221425, + "grad_norm": 11.824331816703102, + "learning_rate": 2.8255011362915735e-06, + "loss": 0.568, + "step": 4373 + }, + { + "epoch": 0.18049022035157217, + "grad_norm": 3.102015613217723, + "learning_rate": 2.8254072785789614e-06, + "loss": 0.5863, + "step": 4374 + }, + { + "epoch": 0.1805314846909301, + "grad_norm": 4.003651619141732, + "learning_rate": 2.8253133971912597e-06, + "loss": 0.5812, + "step": 4375 + }, + { + "epoch": 0.18057274903028803, + "grad_norm": 3.059729552541652, + "learning_rate": 2.825219492130145e-06, + "loss": 0.5701, + "step": 4376 + }, + { + "epoch": 0.18061401336964594, + "grad_norm": 6.938424614019373, + "learning_rate": 2.825125563397295e-06, + "loss": 0.5716, + "step": 4377 + }, + { + "epoch": 0.1806552777090039, + "grad_norm": 3.2512744971481156, + "learning_rate": 2.8250316109943876e-06, + "loss": 0.5652, + "step": 4378 + }, + { + "epoch": 0.1806965420483618, + "grad_norm": 2.29596643138158, + "learning_rate": 2.824937634923101e-06, + "loss": 0.5835, + "step": 4379 + }, + { + "epoch": 0.18073780638771975, + "grad_norm": 3.6175252020630864, + "learning_rate": 2.8248436351851136e-06, + "loss": 0.584, + 
"step": 4380 + }, + { + "epoch": 0.18077907072707766, + "grad_norm": 4.410722908363925, + "learning_rate": 2.8247496117821044e-06, + "loss": 0.6384, + "step": 4381 + }, + { + "epoch": 0.18082033506643558, + "grad_norm": 2.8234609212403576, + "learning_rate": 2.824655564715754e-06, + "loss": 0.5892, + "step": 4382 + }, + { + "epoch": 0.18086159940579352, + "grad_norm": 13.16562129406188, + "learning_rate": 2.8245614939877408e-06, + "loss": 0.5604, + "step": 4383 + }, + { + "epoch": 0.18090286374515144, + "grad_norm": 3.350129844399933, + "learning_rate": 2.8244673995997453e-06, + "loss": 0.4923, + "step": 4384 + }, + { + "epoch": 0.18094412808450935, + "grad_norm": 2.5474730717805576, + "learning_rate": 2.8243732815534496e-06, + "loss": 0.5543, + "step": 4385 + }, + { + "epoch": 0.1809853924238673, + "grad_norm": 3.827006702064682, + "learning_rate": 2.8242791398505338e-06, + "loss": 0.5353, + "step": 4386 + }, + { + "epoch": 0.1810266567632252, + "grad_norm": 4.637766721239914, + "learning_rate": 2.8241849744926796e-06, + "loss": 0.5438, + "step": 4387 + }, + { + "epoch": 0.18106792110258316, + "grad_norm": 2.2688943620753705, + "learning_rate": 2.824090785481569e-06, + "loss": 0.5934, + "step": 4388 + }, + { + "epoch": 0.18110918544194107, + "grad_norm": 2.8954676457890853, + "learning_rate": 2.8239965728188846e-06, + "loss": 0.5058, + "step": 4389 + }, + { + "epoch": 0.181150449781299, + "grad_norm": 2.915570442685232, + "learning_rate": 2.823902336506309e-06, + "loss": 0.5819, + "step": 4390 + }, + { + "epoch": 0.18119171412065693, + "grad_norm": 7.162189348076632, + "learning_rate": 2.8238080765455265e-06, + "loss": 0.6589, + "step": 4391 + }, + { + "epoch": 0.18123297846001485, + "grad_norm": 5.316317865464965, + "learning_rate": 2.8237137929382197e-06, + "loss": 0.5555, + "step": 4392 + }, + { + "epoch": 0.1812742427993728, + "grad_norm": 2.2028059118241874, + "learning_rate": 2.8236194856860736e-06, + "loss": 0.6043, + "step": 4393 + }, + { + "epoch": 0.1813155071387307, + "grad_norm": 3.266079789142147, + "learning_rate": 2.823525154790772e-06, + "loss": 0.612, + "step": 4394 + }, + { + "epoch": 0.18135677147808862, + "grad_norm": 4.785802080050962, + "learning_rate": 2.8234308002539996e-06, + "loss": 0.5197, + "step": 4395 + }, + { + "epoch": 0.18139803581744657, + "grad_norm": 1.9389384620998258, + "learning_rate": 2.823336422077443e-06, + "loss": 0.5862, + "step": 4396 + }, + { + "epoch": 0.18143930015680448, + "grad_norm": 9.453542133020735, + "learning_rate": 2.8232420202627876e-06, + "loss": 0.6364, + "step": 4397 + }, + { + "epoch": 0.18148056449616243, + "grad_norm": 5.0424165709750195, + "learning_rate": 2.823147594811719e-06, + "loss": 0.572, + "step": 4398 + }, + { + "epoch": 0.18152182883552034, + "grad_norm": 4.186260710098319, + "learning_rate": 2.8230531457259245e-06, + "loss": 0.5927, + "step": 4399 + }, + { + "epoch": 0.18156309317487826, + "grad_norm": 2.8132649366174483, + "learning_rate": 2.822958673007091e-06, + "loss": 0.5416, + "step": 4400 + }, + { + "epoch": 0.1816043575142362, + "grad_norm": 4.947236723665208, + "learning_rate": 2.8228641766569064e-06, + "loss": 0.5451, + "step": 4401 + }, + { + "epoch": 0.18164562185359412, + "grad_norm": 2.759447798451973, + "learning_rate": 2.822769656677058e-06, + "loss": 0.5103, + "step": 4402 + }, + { + "epoch": 0.18168688619295206, + "grad_norm": 4.79707187869223, + "learning_rate": 2.8226751130692343e-06, + "loss": 0.5866, + "step": 4403 + }, + { + "epoch": 0.18172815053230998, + "grad_norm": 8.872346993934707, + 
"learning_rate": 2.8225805458351244e-06, + "loss": 0.4973, + "step": 4404 + }, + { + "epoch": 0.1817694148716679, + "grad_norm": 6.189611141030555, + "learning_rate": 2.8224859549764174e-06, + "loss": 0.617, + "step": 4405 + }, + { + "epoch": 0.18181067921102584, + "grad_norm": 2.3921630147129505, + "learning_rate": 2.8223913404948034e-06, + "loss": 0.54, + "step": 4406 + }, + { + "epoch": 0.18185194355038375, + "grad_norm": 3.2213055968956352, + "learning_rate": 2.822296702391971e-06, + "loss": 0.5715, + "step": 4407 + }, + { + "epoch": 0.1818932078897417, + "grad_norm": 10.257762232061285, + "learning_rate": 2.822202040669612e-06, + "loss": 0.5775, + "step": 4408 + }, + { + "epoch": 0.18193447222909961, + "grad_norm": 2.5637583702706013, + "learning_rate": 2.822107355329417e-06, + "loss": 0.5357, + "step": 4409 + }, + { + "epoch": 0.18197573656845753, + "grad_norm": 2.620207549795289, + "learning_rate": 2.822012646373078e-06, + "loss": 0.5499, + "step": 4410 + }, + { + "epoch": 0.18201700090781547, + "grad_norm": 3.301965512641069, + "learning_rate": 2.8219179138022847e-06, + "loss": 0.5974, + "step": 4411 + }, + { + "epoch": 0.1820582652471734, + "grad_norm": 6.176605937564343, + "learning_rate": 2.821823157618731e-06, + "loss": 0.5769, + "step": 4412 + }, + { + "epoch": 0.18209952958653133, + "grad_norm": 3.6738930483112178, + "learning_rate": 2.8217283778241088e-06, + "loss": 0.6113, + "step": 4413 + }, + { + "epoch": 0.18214079392588925, + "grad_norm": 2.861357237356281, + "learning_rate": 2.8216335744201117e-06, + "loss": 0.6032, + "step": 4414 + }, + { + "epoch": 0.18218205826524717, + "grad_norm": 5.050356797493491, + "learning_rate": 2.821538747408433e-06, + "loss": 0.5279, + "step": 4415 + }, + { + "epoch": 0.1822233226046051, + "grad_norm": 8.383648998038145, + "learning_rate": 2.8214438967907656e-06, + "loss": 0.5595, + "step": 4416 + }, + { + "epoch": 0.18226458694396303, + "grad_norm": 2.7215808815012568, + "learning_rate": 2.821349022568805e-06, + "loss": 0.6084, + "step": 4417 + }, + { + "epoch": 0.18230585128332094, + "grad_norm": 2.998584523088135, + "learning_rate": 2.821254124744245e-06, + "loss": 0.575, + "step": 4418 + }, + { + "epoch": 0.18234711562267888, + "grad_norm": 2.941935629402144, + "learning_rate": 2.821159203318781e-06, + "loss": 0.5622, + "step": 4419 + }, + { + "epoch": 0.1823883799620368, + "grad_norm": 2.8581801045367654, + "learning_rate": 2.8210642582941086e-06, + "loss": 0.5109, + "step": 4420 + }, + { + "epoch": 0.18242964430139474, + "grad_norm": 2.8603835187638236, + "learning_rate": 2.8209692896719238e-06, + "loss": 0.5627, + "step": 4421 + }, + { + "epoch": 0.18247090864075266, + "grad_norm": 2.826919480866363, + "learning_rate": 2.820874297453923e-06, + "loss": 0.5852, + "step": 4422 + }, + { + "epoch": 0.18251217298011058, + "grad_norm": 3.091193433235489, + "learning_rate": 2.8207792816418027e-06, + "loss": 0.5478, + "step": 4423 + }, + { + "epoch": 0.18255343731946852, + "grad_norm": 10.400025750018921, + "learning_rate": 2.820684242237261e-06, + "loss": 0.5575, + "step": 4424 + }, + { + "epoch": 0.18259470165882644, + "grad_norm": 2.7131380316150806, + "learning_rate": 2.8205891792419943e-06, + "loss": 0.5201, + "step": 4425 + }, + { + "epoch": 0.18263596599818438, + "grad_norm": 3.7382654007243565, + "learning_rate": 2.8204940926577017e-06, + "loss": 0.5453, + "step": 4426 + }, + { + "epoch": 0.1826772303375423, + "grad_norm": 12.014010151373087, + "learning_rate": 2.820398982486081e-06, + "loss": 0.5917, + "step": 4427 + }, + { + 
"epoch": 0.1827184946769002, + "grad_norm": 5.75945277052927, + "learning_rate": 2.8203038487288307e-06, + "loss": 0.6189, + "step": 4428 + }, + { + "epoch": 0.18275975901625816, + "grad_norm": 2.418319495943658, + "learning_rate": 2.8202086913876514e-06, + "loss": 0.595, + "step": 4429 + }, + { + "epoch": 0.18280102335561607, + "grad_norm": 7.106529087686501, + "learning_rate": 2.8201135104642427e-06, + "loss": 0.584, + "step": 4430 + }, + { + "epoch": 0.18284228769497402, + "grad_norm": 4.251657513526222, + "learning_rate": 2.8200183059603036e-06, + "loss": 0.5565, + "step": 4431 + }, + { + "epoch": 0.18288355203433193, + "grad_norm": 3.2068811863749263, + "learning_rate": 2.8199230778775355e-06, + "loss": 0.5469, + "step": 4432 + }, + { + "epoch": 0.18292481637368985, + "grad_norm": 3.6621669859729056, + "learning_rate": 2.819827826217639e-06, + "loss": 0.5376, + "step": 4433 + }, + { + "epoch": 0.1829660807130478, + "grad_norm": 10.125132868128082, + "learning_rate": 2.8197325509823164e-06, + "loss": 0.5934, + "step": 4434 + }, + { + "epoch": 0.1830073450524057, + "grad_norm": 4.665102602573318, + "learning_rate": 2.8196372521732687e-06, + "loss": 0.5069, + "step": 4435 + }, + { + "epoch": 0.18304860939176365, + "grad_norm": 9.305555275677442, + "learning_rate": 2.8195419297921984e-06, + "loss": 0.5224, + "step": 4436 + }, + { + "epoch": 0.18308987373112157, + "grad_norm": 2.035007821374209, + "learning_rate": 2.8194465838408083e-06, + "loss": 0.5524, + "step": 4437 + }, + { + "epoch": 0.18313113807047948, + "grad_norm": 2.3466178824163175, + "learning_rate": 2.8193512143208014e-06, + "loss": 0.6119, + "step": 4438 + }, + { + "epoch": 0.18317240240983743, + "grad_norm": 6.531837012585863, + "learning_rate": 2.8192558212338815e-06, + "loss": 0.5721, + "step": 4439 + }, + { + "epoch": 0.18321366674919534, + "grad_norm": 4.260177053802271, + "learning_rate": 2.8191604045817524e-06, + "loss": 0.5402, + "step": 4440 + }, + { + "epoch": 0.18325493108855329, + "grad_norm": 3.9069019643511282, + "learning_rate": 2.819064964366118e-06, + "loss": 0.5516, + "step": 4441 + }, + { + "epoch": 0.1832961954279112, + "grad_norm": 3.0428383082867296, + "learning_rate": 2.818969500588684e-06, + "loss": 0.5483, + "step": 4442 + }, + { + "epoch": 0.18333745976726912, + "grad_norm": 2.1826253010458596, + "learning_rate": 2.818874013251155e-06, + "loss": 0.5355, + "step": 4443 + }, + { + "epoch": 0.18337872410662706, + "grad_norm": 3.309336021001944, + "learning_rate": 2.818778502355237e-06, + "loss": 0.5849, + "step": 4444 + }, + { + "epoch": 0.18341998844598498, + "grad_norm": 3.665613155314562, + "learning_rate": 2.8186829679026355e-06, + "loss": 0.5685, + "step": 4445 + }, + { + "epoch": 0.1834612527853429, + "grad_norm": 3.210219612316148, + "learning_rate": 2.8185874098950576e-06, + "loss": 0.543, + "step": 4446 + }, + { + "epoch": 0.18350251712470084, + "grad_norm": 8.548086544477059, + "learning_rate": 2.8184918283342098e-06, + "loss": 0.5869, + "step": 4447 + }, + { + "epoch": 0.18354378146405875, + "grad_norm": 2.721836410379898, + "learning_rate": 2.8183962232217993e-06, + "loss": 0.5587, + "step": 4448 + }, + { + "epoch": 0.1835850458034167, + "grad_norm": 5.069807797868821, + "learning_rate": 2.8183005945595347e-06, + "loss": 0.5264, + "step": 4449 + }, + { + "epoch": 0.1836263101427746, + "grad_norm": 4.543065293340037, + "learning_rate": 2.8182049423491236e-06, + "loss": 0.5408, + "step": 4450 + }, + { + "epoch": 0.18366757448213253, + "grad_norm": 2.5065208965302923, + "learning_rate": 
2.8181092665922744e-06, + "loss": 0.5535, + "step": 4451 + }, + { + "epoch": 0.18370883882149047, + "grad_norm": 2.4397450879028324, + "learning_rate": 2.8180135672906955e-06, + "loss": 0.6007, + "step": 4452 + }, + { + "epoch": 0.1837501031608484, + "grad_norm": 18.704477368012167, + "learning_rate": 2.817917844446098e-06, + "loss": 0.5538, + "step": 4453 + }, + { + "epoch": 0.18379136750020633, + "grad_norm": 2.468297162282446, + "learning_rate": 2.8178220980601913e-06, + "loss": 0.5711, + "step": 4454 + }, + { + "epoch": 0.18383263183956425, + "grad_norm": 4.803152189989216, + "learning_rate": 2.8177263281346845e-06, + "loss": 0.6038, + "step": 4455 + }, + { + "epoch": 0.18387389617892216, + "grad_norm": 5.086279246816927, + "learning_rate": 2.8176305346712892e-06, + "loss": 0.6322, + "step": 4456 + }, + { + "epoch": 0.1839151605182801, + "grad_norm": 3.4943385935259568, + "learning_rate": 2.8175347176717164e-06, + "loss": 0.5192, + "step": 4457 + }, + { + "epoch": 0.18395642485763802, + "grad_norm": 5.773312376177108, + "learning_rate": 2.8174388771376774e-06, + "loss": 0.5904, + "step": 4458 + }, + { + "epoch": 0.18399768919699597, + "grad_norm": 6.081591454617566, + "learning_rate": 2.8173430130708846e-06, + "loss": 0.627, + "step": 4459 + }, + { + "epoch": 0.18403895353635388, + "grad_norm": 5.065704636945394, + "learning_rate": 2.81724712547305e-06, + "loss": 0.5665, + "step": 4460 + }, + { + "epoch": 0.1840802178757118, + "grad_norm": 4.051330559373775, + "learning_rate": 2.8171512143458865e-06, + "loss": 0.5325, + "step": 4461 + }, + { + "epoch": 0.18412148221506974, + "grad_norm": 3.4409349218106806, + "learning_rate": 2.817055279691107e-06, + "loss": 0.6485, + "step": 4462 + }, + { + "epoch": 0.18416274655442766, + "grad_norm": 2.479273787017348, + "learning_rate": 2.8169593215104263e-06, + "loss": 0.5567, + "step": 4463 + }, + { + "epoch": 0.1842040108937856, + "grad_norm": 10.063485186845961, + "learning_rate": 2.816863339805557e-06, + "loss": 0.5629, + "step": 4464 + }, + { + "epoch": 0.18424527523314352, + "grad_norm": 4.091672279268826, + "learning_rate": 2.8167673345782137e-06, + "loss": 0.5381, + "step": 4465 + }, + { + "epoch": 0.18428653957250143, + "grad_norm": 10.280236458219338, + "learning_rate": 2.8166713058301123e-06, + "loss": 0.5718, + "step": 4466 + }, + { + "epoch": 0.18432780391185938, + "grad_norm": 2.9502340331089623, + "learning_rate": 2.8165752535629675e-06, + "loss": 0.5295, + "step": 4467 + }, + { + "epoch": 0.1843690682512173, + "grad_norm": 2.3415648725236795, + "learning_rate": 2.816479177778495e-06, + "loss": 0.5439, + "step": 4468 + }, + { + "epoch": 0.18441033259057524, + "grad_norm": 5.34342978779321, + "learning_rate": 2.8163830784784115e-06, + "loss": 0.597, + "step": 4469 + }, + { + "epoch": 0.18445159692993315, + "grad_norm": 4.850951665987917, + "learning_rate": 2.816286955664433e-06, + "loss": 0.4948, + "step": 4470 + }, + { + "epoch": 0.18449286126929107, + "grad_norm": 4.9388769984964735, + "learning_rate": 2.816190809338276e-06, + "loss": 0.5372, + "step": 4471 + }, + { + "epoch": 0.184534125608649, + "grad_norm": 25.652679580609657, + "learning_rate": 2.8160946395016592e-06, + "loss": 0.5623, + "step": 4472 + }, + { + "epoch": 0.18457538994800693, + "grad_norm": 4.574101465938704, + "learning_rate": 2.8159984461562995e-06, + "loss": 0.5415, + "step": 4473 + }, + { + "epoch": 0.18461665428736487, + "grad_norm": 3.068870892245148, + "learning_rate": 2.8159022293039154e-06, + "loss": 0.5357, + "step": 4474 + }, + { + "epoch": 
0.1846579186267228, + "grad_norm": 4.381179151214969, + "learning_rate": 2.8158059889462255e-06, + "loss": 0.5378, + "step": 4475 + }, + { + "epoch": 0.1846991829660807, + "grad_norm": 3.688052330542956, + "learning_rate": 2.815709725084949e-06, + "loss": 0.6049, + "step": 4476 + }, + { + "epoch": 0.18474044730543865, + "grad_norm": 3.1261850230831434, + "learning_rate": 2.8156134377218056e-06, + "loss": 0.5926, + "step": 4477 + }, + { + "epoch": 0.18478171164479656, + "grad_norm": 3.375541656336972, + "learning_rate": 2.815517126858515e-06, + "loss": 0.5702, + "step": 4478 + }, + { + "epoch": 0.18482297598415448, + "grad_norm": 2.4853301030546877, + "learning_rate": 2.8154207924967977e-06, + "loss": 0.529, + "step": 4479 + }, + { + "epoch": 0.18486424032351242, + "grad_norm": 5.224476777524843, + "learning_rate": 2.8153244346383743e-06, + "loss": 0.5227, + "step": 4480 + }, + { + "epoch": 0.18490550466287034, + "grad_norm": 3.8979762120229973, + "learning_rate": 2.815228053284966e-06, + "loss": 0.5535, + "step": 4481 + }, + { + "epoch": 0.18494676900222828, + "grad_norm": 3.9305946853943436, + "learning_rate": 2.8151316484382946e-06, + "loss": 0.5666, + "step": 4482 + }, + { + "epoch": 0.1849880333415862, + "grad_norm": 2.51494357513711, + "learning_rate": 2.815035220100082e-06, + "loss": 0.596, + "step": 4483 + }, + { + "epoch": 0.18502929768094412, + "grad_norm": 4.005351462987197, + "learning_rate": 2.8149387682720504e-06, + "loss": 0.5498, + "step": 4484 + }, + { + "epoch": 0.18507056202030206, + "grad_norm": 3.724648191151729, + "learning_rate": 2.814842292955923e-06, + "loss": 0.5793, + "step": 4485 + }, + { + "epoch": 0.18511182635965998, + "grad_norm": 2.7273999308446593, + "learning_rate": 2.8147457941534233e-06, + "loss": 0.5682, + "step": 4486 + }, + { + "epoch": 0.18515309069901792, + "grad_norm": 17.422094999869906, + "learning_rate": 2.8146492718662744e-06, + "loss": 0.5402, + "step": 4487 + }, + { + "epoch": 0.18519435503837584, + "grad_norm": 3.1515078209998166, + "learning_rate": 2.814552726096201e-06, + "loss": 0.5398, + "step": 4488 + }, + { + "epoch": 0.18523561937773375, + "grad_norm": 1.9465042457887534, + "learning_rate": 2.814456156844927e-06, + "loss": 0.562, + "step": 4489 + }, + { + "epoch": 0.1852768837170917, + "grad_norm": 5.351456311451444, + "learning_rate": 2.8143595641141783e-06, + "loss": 0.5816, + "step": 4490 + }, + { + "epoch": 0.1853181480564496, + "grad_norm": 7.500818568795052, + "learning_rate": 2.8142629479056792e-06, + "loss": 0.5191, + "step": 4491 + }, + { + "epoch": 0.18535941239580755, + "grad_norm": 4.832820374192264, + "learning_rate": 2.8141663082211563e-06, + "loss": 0.5445, + "step": 4492 + }, + { + "epoch": 0.18540067673516547, + "grad_norm": 8.074905566544322, + "learning_rate": 2.814069645062336e-06, + "loss": 0.5201, + "step": 4493 + }, + { + "epoch": 0.1854419410745234, + "grad_norm": 4.16994083273226, + "learning_rate": 2.813972958430944e-06, + "loss": 0.5938, + "step": 4494 + }, + { + "epoch": 0.18548320541388133, + "grad_norm": 2.0970350323517013, + "learning_rate": 2.813876248328708e-06, + "loss": 0.5287, + "step": 4495 + }, + { + "epoch": 0.18552446975323925, + "grad_norm": 2.485286388390517, + "learning_rate": 2.8137795147573555e-06, + "loss": 0.5375, + "step": 4496 + }, + { + "epoch": 0.1855657340925972, + "grad_norm": 2.273511036872336, + "learning_rate": 2.8136827577186146e-06, + "loss": 0.5563, + "step": 4497 + }, + { + "epoch": 0.1856069984319551, + "grad_norm": 2.985196581721337, + "learning_rate": 
2.8135859772142133e-06, + "loss": 0.5539, + "step": 4498 + }, + { + "epoch": 0.18564826277131302, + "grad_norm": 2.6402389079586324, + "learning_rate": 2.8134891732458795e-06, + "loss": 0.5409, + "step": 4499 + }, + { + "epoch": 0.18568952711067097, + "grad_norm": 3.926529501599978, + "learning_rate": 2.8133923458153443e-06, + "loss": 0.6398, + "step": 4500 + }, + { + "epoch": 0.18573079145002888, + "grad_norm": 4.412291273133433, + "learning_rate": 2.8132954949243355e-06, + "loss": 0.5988, + "step": 4501 + }, + { + "epoch": 0.18577205578938683, + "grad_norm": 5.634457541249376, + "learning_rate": 2.813198620574584e-06, + "loss": 0.551, + "step": 4502 + }, + { + "epoch": 0.18581332012874474, + "grad_norm": 2.7619751180936807, + "learning_rate": 2.81310172276782e-06, + "loss": 0.594, + "step": 4503 + }, + { + "epoch": 0.18585458446810266, + "grad_norm": 3.3245420262510965, + "learning_rate": 2.8130048015057743e-06, + "loss": 0.5371, + "step": 4504 + }, + { + "epoch": 0.1858958488074606, + "grad_norm": 4.64268576888755, + "learning_rate": 2.8129078567901783e-06, + "loss": 0.6017, + "step": 4505 + }, + { + "epoch": 0.18593711314681852, + "grad_norm": 3.605266103347036, + "learning_rate": 2.8128108886227633e-06, + "loss": 0.5311, + "step": 4506 + }, + { + "epoch": 0.18597837748617643, + "grad_norm": 3.694445960059793, + "learning_rate": 2.8127138970052623e-06, + "loss": 0.5492, + "step": 4507 + }, + { + "epoch": 0.18601964182553438, + "grad_norm": 3.199996935191566, + "learning_rate": 2.8126168819394067e-06, + "loss": 0.5156, + "step": 4508 + }, + { + "epoch": 0.1860609061648923, + "grad_norm": 2.99587863787472, + "learning_rate": 2.8125198434269303e-06, + "loss": 0.5172, + "step": 4509 + }, + { + "epoch": 0.18610217050425024, + "grad_norm": 2.7328265857932856, + "learning_rate": 2.8124227814695657e-06, + "loss": 0.5522, + "step": 4510 + }, + { + "epoch": 0.18614343484360815, + "grad_norm": 2.8699435621962524, + "learning_rate": 2.8123256960690474e-06, + "loss": 0.5791, + "step": 4511 + }, + { + "epoch": 0.18618469918296607, + "grad_norm": 3.2367924443650806, + "learning_rate": 2.812228587227109e-06, + "loss": 0.5614, + "step": 4512 + }, + { + "epoch": 0.186225963522324, + "grad_norm": 7.264964403307644, + "learning_rate": 2.812131454945485e-06, + "loss": 0.5724, + "step": 4513 + }, + { + "epoch": 0.18626722786168193, + "grad_norm": 3.803272257910263, + "learning_rate": 2.8120342992259112e-06, + "loss": 0.5543, + "step": 4514 + }, + { + "epoch": 0.18630849220103987, + "grad_norm": 2.900339850922371, + "learning_rate": 2.8119371200701226e-06, + "loss": 0.5891, + "step": 4515 + }, + { + "epoch": 0.1863497565403978, + "grad_norm": 3.5471649792581648, + "learning_rate": 2.811839917479855e-06, + "loss": 0.5706, + "step": 4516 + }, + { + "epoch": 0.1863910208797557, + "grad_norm": 17.933385953691555, + "learning_rate": 2.8117426914568448e-06, + "loss": 0.558, + "step": 4517 + }, + { + "epoch": 0.18643228521911365, + "grad_norm": 3.06905797576597, + "learning_rate": 2.811645442002829e-06, + "loss": 0.5362, + "step": 4518 + }, + { + "epoch": 0.18647354955847156, + "grad_norm": 2.9242093665230007, + "learning_rate": 2.8115481691195437e-06, + "loss": 0.5671, + "step": 4519 + }, + { + "epoch": 0.1865148138978295, + "grad_norm": 6.969844874960367, + "learning_rate": 2.8114508728087273e-06, + "loss": 0.5724, + "step": 4520 + }, + { + "epoch": 0.18655607823718742, + "grad_norm": 3.595075507065001, + "learning_rate": 2.811353553072118e-06, + "loss": 0.5494, + "step": 4521 + }, + { + "epoch": 
0.18659734257654534, + "grad_norm": 3.2715951503061325, + "learning_rate": 2.811256209911453e-06, + "loss": 0.5957, + "step": 4522 + }, + { + "epoch": 0.18663860691590328, + "grad_norm": 2.8405371151895986, + "learning_rate": 2.811158843328472e-06, + "loss": 0.5564, + "step": 4523 + }, + { + "epoch": 0.1866798712552612, + "grad_norm": 3.215289674501209, + "learning_rate": 2.8110614533249145e-06, + "loss": 0.6057, + "step": 4524 + }, + { + "epoch": 0.18672113559461914, + "grad_norm": 2.0649493534794963, + "learning_rate": 2.810964039902519e-06, + "loss": 0.5286, + "step": 4525 + }, + { + "epoch": 0.18676239993397706, + "grad_norm": 4.311220797051815, + "learning_rate": 2.810866603063027e-06, + "loss": 0.5635, + "step": 4526 + }, + { + "epoch": 0.18680366427333497, + "grad_norm": 3.8865770228348007, + "learning_rate": 2.8107691428081778e-06, + "loss": 0.5613, + "step": 4527 + }, + { + "epoch": 0.18684492861269292, + "grad_norm": 3.4898711688857675, + "learning_rate": 2.810671659139713e-06, + "loss": 0.5566, + "step": 4528 + }, + { + "epoch": 0.18688619295205083, + "grad_norm": 3.945517461715116, + "learning_rate": 2.8105741520593733e-06, + "loss": 0.5462, + "step": 4529 + }, + { + "epoch": 0.18692745729140878, + "grad_norm": 2.495790782645932, + "learning_rate": 2.8104766215689e-06, + "loss": 0.5709, + "step": 4530 + }, + { + "epoch": 0.1869687216307667, + "grad_norm": 4.545404894180131, + "learning_rate": 2.8103790676700367e-06, + "loss": 0.5708, + "step": 4531 + }, + { + "epoch": 0.1870099859701246, + "grad_norm": 3.2466540350771838, + "learning_rate": 2.8102814903645253e-06, + "loss": 0.5911, + "step": 4532 + }, + { + "epoch": 0.18705125030948255, + "grad_norm": 7.953696194545796, + "learning_rate": 2.810183889654108e-06, + "loss": 0.5819, + "step": 4533 + }, + { + "epoch": 0.18709251464884047, + "grad_norm": 4.845950087704659, + "learning_rate": 2.81008626554053e-06, + "loss": 0.5387, + "step": 4534 + }, + { + "epoch": 0.1871337789881984, + "grad_norm": 2.9181028000156526, + "learning_rate": 2.8099886180255327e-06, + "loss": 0.5547, + "step": 4535 + }, + { + "epoch": 0.18717504332755633, + "grad_norm": 2.538714302497037, + "learning_rate": 2.8098909471108625e-06, + "loss": 0.5531, + "step": 4536 + }, + { + "epoch": 0.18721630766691424, + "grad_norm": 3.1755809394135643, + "learning_rate": 2.8097932527982628e-06, + "loss": 0.5532, + "step": 4537 + }, + { + "epoch": 0.1872575720062722, + "grad_norm": 3.6641246254511173, + "learning_rate": 2.8096955350894793e-06, + "loss": 0.5661, + "step": 4538 + }, + { + "epoch": 0.1872988363456301, + "grad_norm": 2.330041762548601, + "learning_rate": 2.8095977939862565e-06, + "loss": 0.5871, + "step": 4539 + }, + { + "epoch": 0.18734010068498802, + "grad_norm": 3.9297841239404967, + "learning_rate": 2.8095000294903414e-06, + "loss": 0.5553, + "step": 4540 + }, + { + "epoch": 0.18738136502434596, + "grad_norm": 2.9717070916846184, + "learning_rate": 2.8094022416034798e-06, + "loss": 0.5516, + "step": 4541 + }, + { + "epoch": 0.18742262936370388, + "grad_norm": 2.092539277572058, + "learning_rate": 2.8093044303274186e-06, + "loss": 0.5845, + "step": 4542 + }, + { + "epoch": 0.18746389370306182, + "grad_norm": 3.7175746639324942, + "learning_rate": 2.809206595663905e-06, + "loss": 0.6099, + "step": 4543 + }, + { + "epoch": 0.18750515804241974, + "grad_norm": 2.7623933653374926, + "learning_rate": 2.809108737614686e-06, + "loss": 0.6107, + "step": 4544 + }, + { + "epoch": 0.18754642238177766, + "grad_norm": 2.505176469801633, + "learning_rate": 
2.8090108561815104e-06, + "loss": 0.4911, + "step": 4545 + }, + { + "epoch": 0.1875876867211356, + "grad_norm": 2.3719813399017537, + "learning_rate": 2.8089129513661266e-06, + "loss": 0.4929, + "step": 4546 + }, + { + "epoch": 0.18762895106049352, + "grad_norm": 3.1982707328610287, + "learning_rate": 2.808815023170283e-06, + "loss": 0.5797, + "step": 4547 + }, + { + "epoch": 0.18767021539985146, + "grad_norm": 2.1325386527947643, + "learning_rate": 2.808717071595728e-06, + "loss": 0.5327, + "step": 4548 + }, + { + "epoch": 0.18771147973920937, + "grad_norm": 2.8634413924716857, + "learning_rate": 2.808619096644213e-06, + "loss": 0.6091, + "step": 4549 + }, + { + "epoch": 0.1877527440785673, + "grad_norm": 2.7876888576562995, + "learning_rate": 2.8085210983174874e-06, + "loss": 0.5625, + "step": 4550 + }, + { + "epoch": 0.18779400841792523, + "grad_norm": 3.1020725839096728, + "learning_rate": 2.8084230766173015e-06, + "loss": 0.6169, + "step": 4551 + }, + { + "epoch": 0.18783527275728315, + "grad_norm": 2.7371010160770433, + "learning_rate": 2.8083250315454063e-06, + "loss": 0.6382, + "step": 4552 + }, + { + "epoch": 0.1878765370966411, + "grad_norm": 2.3889954757995233, + "learning_rate": 2.808226963103553e-06, + "loss": 0.5244, + "step": 4553 + }, + { + "epoch": 0.187917801435999, + "grad_norm": 2.748638534694857, + "learning_rate": 2.808128871293493e-06, + "loss": 0.5825, + "step": 4554 + }, + { + "epoch": 0.18795906577535693, + "grad_norm": 2.9700432266806773, + "learning_rate": 2.8080307561169793e-06, + "loss": 0.5847, + "step": 4555 + }, + { + "epoch": 0.18800033011471487, + "grad_norm": 3.2108562559790568, + "learning_rate": 2.807932617575764e-06, + "loss": 0.5724, + "step": 4556 + }, + { + "epoch": 0.18804159445407279, + "grad_norm": 2.248324443869408, + "learning_rate": 2.8078344556716003e-06, + "loss": 0.5215, + "step": 4557 + }, + { + "epoch": 0.18808285879343073, + "grad_norm": 2.0107647720920814, + "learning_rate": 2.8077362704062415e-06, + "loss": 0.5501, + "step": 4558 + }, + { + "epoch": 0.18812412313278865, + "grad_norm": 5.463708175206399, + "learning_rate": 2.807638061781442e-06, + "loss": 0.5819, + "step": 4559 + }, + { + "epoch": 0.18816538747214656, + "grad_norm": 3.194486941165763, + "learning_rate": 2.8075398297989547e-06, + "loss": 0.5569, + "step": 4560 + }, + { + "epoch": 0.1882066518115045, + "grad_norm": 19.386594481488313, + "learning_rate": 2.8074415744605355e-06, + "loss": 0.538, + "step": 4561 + }, + { + "epoch": 0.18824791615086242, + "grad_norm": 6.220432018207692, + "learning_rate": 2.807343295767939e-06, + "loss": 0.5456, + "step": 4562 + }, + { + "epoch": 0.18828918049022036, + "grad_norm": 4.281751651721715, + "learning_rate": 2.8072449937229206e-06, + "loss": 0.5588, + "step": 4563 + }, + { + "epoch": 0.18833044482957828, + "grad_norm": 3.279039931466683, + "learning_rate": 2.8071466683272364e-06, + "loss": 0.6075, + "step": 4564 + }, + { + "epoch": 0.1883717091689362, + "grad_norm": 2.8091573735326327, + "learning_rate": 2.8070483195826426e-06, + "loss": 0.5806, + "step": 4565 + }, + { + "epoch": 0.18841297350829414, + "grad_norm": 3.208729378473957, + "learning_rate": 2.8069499474908965e-06, + "loss": 0.5682, + "step": 4566 + }, + { + "epoch": 0.18845423784765206, + "grad_norm": 2.7869761564189366, + "learning_rate": 2.8068515520537546e-06, + "loss": 0.5191, + "step": 4567 + }, + { + "epoch": 0.18849550218700997, + "grad_norm": 52.16622364508694, + "learning_rate": 2.8067531332729747e-06, + "loss": 0.5501, + "step": 4568 + }, + { + "epoch": 
0.18853676652636792, + "grad_norm": 2.5529001030168366, + "learning_rate": 2.806654691150315e-06, + "loss": 0.5167, + "step": 4569 + }, + { + "epoch": 0.18857803086572583, + "grad_norm": 4.586361271683833, + "learning_rate": 2.806556225687534e-06, + "loss": 0.5881, + "step": 4570 + }, + { + "epoch": 0.18861929520508378, + "grad_norm": 3.081443997497139, + "learning_rate": 2.80645773688639e-06, + "loss": 0.5581, + "step": 4571 + }, + { + "epoch": 0.1886605595444417, + "grad_norm": 3.6572779894187653, + "learning_rate": 2.8063592247486423e-06, + "loss": 0.5953, + "step": 4572 + }, + { + "epoch": 0.1887018238837996, + "grad_norm": 15.392911588863784, + "learning_rate": 2.806260689276051e-06, + "loss": 0.5486, + "step": 4573 + }, + { + "epoch": 0.18874308822315755, + "grad_norm": 9.236280178423796, + "learning_rate": 2.806162130470376e-06, + "loss": 0.5176, + "step": 4574 + }, + { + "epoch": 0.18878435256251547, + "grad_norm": 4.3728038950793335, + "learning_rate": 2.8060635483333782e-06, + "loss": 0.5294, + "step": 4575 + }, + { + "epoch": 0.1888256169018734, + "grad_norm": 4.034707585050791, + "learning_rate": 2.8059649428668175e-06, + "loss": 0.5665, + "step": 4576 + }, + { + "epoch": 0.18886688124123133, + "grad_norm": 4.049566698355898, + "learning_rate": 2.8058663140724566e-06, + "loss": 0.5535, + "step": 4577 + }, + { + "epoch": 0.18890814558058924, + "grad_norm": 7.987006769295052, + "learning_rate": 2.8057676619520563e-06, + "loss": 0.5394, + "step": 4578 + }, + { + "epoch": 0.1889494099199472, + "grad_norm": 7.249466563757644, + "learning_rate": 2.805668986507379e-06, + "loss": 0.617, + "step": 4579 + }, + { + "epoch": 0.1889906742593051, + "grad_norm": 2.6790300773889997, + "learning_rate": 2.805570287740187e-06, + "loss": 0.543, + "step": 4580 + }, + { + "epoch": 0.18903193859866305, + "grad_norm": 3.4354124073694234, + "learning_rate": 2.8054715656522443e-06, + "loss": 0.5309, + "step": 4581 + }, + { + "epoch": 0.18907320293802096, + "grad_norm": 2.489650704206752, + "learning_rate": 2.8053728202453134e-06, + "loss": 0.5267, + "step": 4582 + }, + { + "epoch": 0.18911446727737888, + "grad_norm": 3.3432017446805853, + "learning_rate": 2.805274051521158e-06, + "loss": 0.5283, + "step": 4583 + }, + { + "epoch": 0.18915573161673682, + "grad_norm": 4.416686962859137, + "learning_rate": 2.805175259481543e-06, + "loss": 0.5971, + "step": 4584 + }, + { + "epoch": 0.18919699595609474, + "grad_norm": 3.931709230740486, + "learning_rate": 2.805076444128233e-06, + "loss": 0.5924, + "step": 4585 + }, + { + "epoch": 0.18923826029545268, + "grad_norm": 4.450710643825685, + "learning_rate": 2.8049776054629927e-06, + "loss": 0.5508, + "step": 4586 + }, + { + "epoch": 0.1892795246348106, + "grad_norm": 2.7462582714398125, + "learning_rate": 2.804878743487588e-06, + "loss": 0.544, + "step": 4587 + }, + { + "epoch": 0.1893207889741685, + "grad_norm": 2.101107191802698, + "learning_rate": 2.804779858203784e-06, + "loss": 0.5359, + "step": 4588 + }, + { + "epoch": 0.18936205331352646, + "grad_norm": 3.6236678568824923, + "learning_rate": 2.8046809496133483e-06, + "loss": 0.5485, + "step": 4589 + }, + { + "epoch": 0.18940331765288437, + "grad_norm": 5.084838538112691, + "learning_rate": 2.804582017718047e-06, + "loss": 0.6471, + "step": 4590 + }, + { + "epoch": 0.18944458199224232, + "grad_norm": 3.329907462325286, + "learning_rate": 2.8044830625196473e-06, + "loss": 0.5203, + "step": 4591 + }, + { + "epoch": 0.18948584633160023, + "grad_norm": 3.3881430011186566, + "learning_rate": 
2.8043840840199164e-06, + "loss": 0.573, + "step": 4592 + }, + { + "epoch": 0.18952711067095815, + "grad_norm": 2.5484727960813647, + "learning_rate": 2.804285082220623e-06, + "loss": 0.5351, + "step": 4593 + }, + { + "epoch": 0.1895683750103161, + "grad_norm": 3.16369560988201, + "learning_rate": 2.8041860571235354e-06, + "loss": 0.6076, + "step": 4594 + }, + { + "epoch": 0.189609639349674, + "grad_norm": 4.397172762685989, + "learning_rate": 2.8040870087304218e-06, + "loss": 0.5851, + "step": 4595 + }, + { + "epoch": 0.18965090368903195, + "grad_norm": 4.113374873714489, + "learning_rate": 2.8039879370430525e-06, + "loss": 0.5969, + "step": 4596 + }, + { + "epoch": 0.18969216802838987, + "grad_norm": 5.4731873713261745, + "learning_rate": 2.8038888420631958e-06, + "loss": 0.5454, + "step": 4597 + }, + { + "epoch": 0.18973343236774778, + "grad_norm": 7.710979669449735, + "learning_rate": 2.803789723792623e-06, + "loss": 0.5402, + "step": 4598 + }, + { + "epoch": 0.18977469670710573, + "grad_norm": 4.696257051542239, + "learning_rate": 2.803690582233104e-06, + "loss": 0.56, + "step": 4599 + }, + { + "epoch": 0.18981596104646364, + "grad_norm": 2.1336449078475144, + "learning_rate": 2.80359141738641e-06, + "loss": 0.5282, + "step": 4600 + }, + { + "epoch": 0.18985722538582156, + "grad_norm": 2.8945736826915103, + "learning_rate": 2.8034922292543123e-06, + "loss": 0.5937, + "step": 4601 + }, + { + "epoch": 0.1898984897251795, + "grad_norm": 3.103830475803463, + "learning_rate": 2.803393017838582e-06, + "loss": 0.5325, + "step": 4602 + }, + { + "epoch": 0.18993975406453742, + "grad_norm": 3.420978806093393, + "learning_rate": 2.8032937831409924e-06, + "loss": 0.5718, + "step": 4603 + }, + { + "epoch": 0.18998101840389536, + "grad_norm": 4.403158117698581, + "learning_rate": 2.803194525163315e-06, + "loss": 0.5942, + "step": 4604 + }, + { + "epoch": 0.19002228274325328, + "grad_norm": 5.596562496598718, + "learning_rate": 2.803095243907324e-06, + "loss": 0.58, + "step": 4605 + }, + { + "epoch": 0.1900635470826112, + "grad_norm": 2.566495237958963, + "learning_rate": 2.8029959393747916e-06, + "loss": 0.5322, + "step": 4606 + }, + { + "epoch": 0.19010481142196914, + "grad_norm": 3.1222683560051303, + "learning_rate": 2.802896611567492e-06, + "loss": 0.5945, + "step": 4607 + }, + { + "epoch": 0.19014607576132705, + "grad_norm": 4.8085303647300215, + "learning_rate": 2.8027972604871997e-06, + "loss": 0.6487, + "step": 4608 + }, + { + "epoch": 0.190187340100685, + "grad_norm": 8.060727447936879, + "learning_rate": 2.8026978861356896e-06, + "loss": 0.616, + "step": 4609 + }, + { + "epoch": 0.19022860444004291, + "grad_norm": 2.545436671483488, + "learning_rate": 2.802598488514736e-06, + "loss": 0.573, + "step": 4610 + }, + { + "epoch": 0.19026986877940083, + "grad_norm": 4.001061494449794, + "learning_rate": 2.8024990676261153e-06, + "loss": 0.628, + "step": 4611 + }, + { + "epoch": 0.19031113311875877, + "grad_norm": 3.4221153650698586, + "learning_rate": 2.802399623471602e-06, + "loss": 0.5095, + "step": 4612 + }, + { + "epoch": 0.1903523974581167, + "grad_norm": 3.6729129127928086, + "learning_rate": 2.8023001560529743e-06, + "loss": 0.6253, + "step": 4613 + }, + { + "epoch": 0.19039366179747463, + "grad_norm": 3.608376300467388, + "learning_rate": 2.8022006653720074e-06, + "loss": 0.5666, + "step": 4614 + }, + { + "epoch": 0.19043492613683255, + "grad_norm": 2.4565709624543572, + "learning_rate": 2.802101151430479e-06, + "loss": 0.6222, + "step": 4615 + }, + { + "epoch": 0.19047619047619047, + 
"grad_norm": 3.0906907362036797, + "learning_rate": 2.802001614230167e-06, + "loss": 0.5509, + "step": 4616 + }, + { + "epoch": 0.1905174548155484, + "grad_norm": 5.281432162098514, + "learning_rate": 2.8019020537728485e-06, + "loss": 0.474, + "step": 4617 + }, + { + "epoch": 0.19055871915490633, + "grad_norm": 3.9182135939701936, + "learning_rate": 2.8018024700603027e-06, + "loss": 0.5278, + "step": 4618 + }, + { + "epoch": 0.19059998349426427, + "grad_norm": 8.848272315721447, + "learning_rate": 2.8017028630943083e-06, + "loss": 0.6006, + "step": 4619 + }, + { + "epoch": 0.19064124783362218, + "grad_norm": 2.920648621935107, + "learning_rate": 2.8016032328766442e-06, + "loss": 0.5851, + "step": 4620 + }, + { + "epoch": 0.1906825121729801, + "grad_norm": 5.188079436839679, + "learning_rate": 2.8015035794090904e-06, + "loss": 0.55, + "step": 4621 + }, + { + "epoch": 0.19072377651233804, + "grad_norm": 2.102169725603245, + "learning_rate": 2.801403902693427e-06, + "loss": 0.604, + "step": 4622 + }, + { + "epoch": 0.19076504085169596, + "grad_norm": 4.397615226545161, + "learning_rate": 2.801304202731434e-06, + "loss": 0.5415, + "step": 4623 + }, + { + "epoch": 0.1908063051910539, + "grad_norm": 2.263280074901215, + "learning_rate": 2.8012044795248926e-06, + "loss": 0.5206, + "step": 4624 + }, + { + "epoch": 0.19084756953041182, + "grad_norm": 4.4850696188217425, + "learning_rate": 2.801104733075584e-06, + "loss": 0.5684, + "step": 4625 + }, + { + "epoch": 0.19088883386976974, + "grad_norm": 3.5496044245153406, + "learning_rate": 2.8010049633852906e-06, + "loss": 0.5712, + "step": 4626 + }, + { + "epoch": 0.19093009820912768, + "grad_norm": 6.185302159646227, + "learning_rate": 2.8009051704557936e-06, + "loss": 0.5274, + "step": 4627 + }, + { + "epoch": 0.1909713625484856, + "grad_norm": 2.822029019958355, + "learning_rate": 2.8008053542888756e-06, + "loss": 0.5123, + "step": 4628 + }, + { + "epoch": 0.1910126268878435, + "grad_norm": 4.0083194325196505, + "learning_rate": 2.80070551488632e-06, + "loss": 0.5377, + "step": 4629 + }, + { + "epoch": 0.19105389122720146, + "grad_norm": 1.8817680339143743, + "learning_rate": 2.80060565224991e-06, + "loss": 0.5288, + "step": 4630 + }, + { + "epoch": 0.19109515556655937, + "grad_norm": 2.5142109342273393, + "learning_rate": 2.8005057663814296e-06, + "loss": 0.5999, + "step": 4631 + }, + { + "epoch": 0.19113641990591732, + "grad_norm": 2.2319639404020366, + "learning_rate": 2.800405857282663e-06, + "loss": 0.5357, + "step": 4632 + }, + { + "epoch": 0.19117768424527523, + "grad_norm": 2.501716628735954, + "learning_rate": 2.8003059249553943e-06, + "loss": 0.5645, + "step": 4633 + }, + { + "epoch": 0.19121894858463315, + "grad_norm": 5.372766115737189, + "learning_rate": 2.8002059694014087e-06, + "loss": 0.5221, + "step": 4634 + }, + { + "epoch": 0.1912602129239911, + "grad_norm": 2.2605338621997935, + "learning_rate": 2.8001059906224926e-06, + "loss": 0.5122, + "step": 4635 + }, + { + "epoch": 0.191301477263349, + "grad_norm": 3.1951343292621335, + "learning_rate": 2.800005988620431e-06, + "loss": 0.5594, + "step": 4636 + }, + { + "epoch": 0.19134274160270695, + "grad_norm": 8.027880445705277, + "learning_rate": 2.7999059633970096e-06, + "loss": 0.6217, + "step": 4637 + }, + { + "epoch": 0.19138400594206487, + "grad_norm": 2.8712486047005634, + "learning_rate": 2.7998059149540165e-06, + "loss": 0.5654, + "step": 4638 + }, + { + "epoch": 0.19142527028142278, + "grad_norm": 4.787675936142435, + "learning_rate": 2.799705843293238e-06, + "loss": 0.5447, 
+ "step": 4639 + }, + { + "epoch": 0.19146653462078073, + "grad_norm": 2.9834067274040166, + "learning_rate": 2.7996057484164615e-06, + "loss": 0.565, + "step": 4640 + }, + { + "epoch": 0.19150779896013864, + "grad_norm": 3.4069589205070807, + "learning_rate": 2.7995056303254753e-06, + "loss": 0.5624, + "step": 4641 + }, + { + "epoch": 0.19154906329949659, + "grad_norm": 5.7022685230885015, + "learning_rate": 2.799405489022068e-06, + "loss": 0.5497, + "step": 4642 + }, + { + "epoch": 0.1915903276388545, + "grad_norm": 3.1193170632706453, + "learning_rate": 2.799305324508028e-06, + "loss": 0.6018, + "step": 4643 + }, + { + "epoch": 0.19163159197821242, + "grad_norm": 7.039283246586502, + "learning_rate": 2.7992051367851443e-06, + "loss": 0.5644, + "step": 4644 + }, + { + "epoch": 0.19167285631757036, + "grad_norm": 4.742500939810731, + "learning_rate": 2.799104925855207e-06, + "loss": 0.5454, + "step": 4645 + }, + { + "epoch": 0.19171412065692828, + "grad_norm": 3.1113617787499703, + "learning_rate": 2.7990046917200055e-06, + "loss": 0.6215, + "step": 4646 + }, + { + "epoch": 0.19175538499628622, + "grad_norm": 3.160521928376634, + "learning_rate": 2.7989044343813306e-06, + "loss": 0.57, + "step": 4647 + }, + { + "epoch": 0.19179664933564414, + "grad_norm": 3.9153271696859346, + "learning_rate": 2.7988041538409736e-06, + "loss": 0.5061, + "step": 4648 + }, + { + "epoch": 0.19183791367500205, + "grad_norm": 2.7335859299394465, + "learning_rate": 2.798703850100725e-06, + "loss": 0.615, + "step": 4649 + }, + { + "epoch": 0.19187917801436, + "grad_norm": 23.505384551126095, + "learning_rate": 2.7986035231623766e-06, + "loss": 0.5792, + "step": 4650 + }, + { + "epoch": 0.1919204423537179, + "grad_norm": 3.5873620546578184, + "learning_rate": 2.7985031730277212e-06, + "loss": 0.5859, + "step": 4651 + }, + { + "epoch": 0.19196170669307586, + "grad_norm": 2.7592625394441117, + "learning_rate": 2.7984027996985505e-06, + "loss": 0.5044, + "step": 4652 + }, + { + "epoch": 0.19200297103243377, + "grad_norm": 1.80452921430725, + "learning_rate": 2.7983024031766575e-06, + "loss": 0.5593, + "step": 4653 + }, + { + "epoch": 0.1920442353717917, + "grad_norm": 6.5128730277812785, + "learning_rate": 2.7982019834638363e-06, + "loss": 0.5518, + "step": 4654 + }, + { + "epoch": 0.19208549971114963, + "grad_norm": 5.304176677513646, + "learning_rate": 2.798101540561879e-06, + "loss": 0.6002, + "step": 4655 + }, + { + "epoch": 0.19212676405050755, + "grad_norm": 9.155631401344607, + "learning_rate": 2.7980010744725817e-06, + "loss": 0.5984, + "step": 4656 + }, + { + "epoch": 0.1921680283898655, + "grad_norm": 3.587563840042711, + "learning_rate": 2.797900585197738e-06, + "loss": 0.5803, + "step": 4657 + }, + { + "epoch": 0.1922092927292234, + "grad_norm": 1.9179825894964224, + "learning_rate": 2.797800072739143e-06, + "loss": 0.5091, + "step": 4658 + }, + { + "epoch": 0.19225055706858132, + "grad_norm": 2.702185820096192, + "learning_rate": 2.797699537098592e-06, + "loss": 0.569, + "step": 4659 + }, + { + "epoch": 0.19229182140793927, + "grad_norm": 2.2101861811958523, + "learning_rate": 2.7975989782778806e-06, + "loss": 0.5495, + "step": 4660 + }, + { + "epoch": 0.19233308574729718, + "grad_norm": 2.635976363355609, + "learning_rate": 2.797498396278806e-06, + "loss": 0.5708, + "step": 4661 + }, + { + "epoch": 0.1923743500866551, + "grad_norm": 3.0816285780584876, + "learning_rate": 2.7973977911031638e-06, + "loss": 0.5629, + "step": 4662 + }, + { + "epoch": 0.19241561442601304, + "grad_norm": 2.739897894732682, + 
"learning_rate": 2.7972971627527514e-06, + "loss": 0.523, + "step": 4663 + }, + { + "epoch": 0.19245687876537096, + "grad_norm": 3.4777958701608056, + "learning_rate": 2.7971965112293666e-06, + "loss": 0.6184, + "step": 4664 + }, + { + "epoch": 0.1924981431047289, + "grad_norm": 2.556235560724659, + "learning_rate": 2.797095836534807e-06, + "loss": 0.5328, + "step": 4665 + }, + { + "epoch": 0.19253940744408682, + "grad_norm": 2.859104922716205, + "learning_rate": 2.7969951386708704e-06, + "loss": 0.5529, + "step": 4666 + }, + { + "epoch": 0.19258067178344473, + "grad_norm": 3.383621033530622, + "learning_rate": 2.796894417639357e-06, + "loss": 0.5497, + "step": 4667 + }, + { + "epoch": 0.19262193612280268, + "grad_norm": 2.1257412838624496, + "learning_rate": 2.7967936734420642e-06, + "loss": 0.5661, + "step": 4668 + }, + { + "epoch": 0.1926632004621606, + "grad_norm": 5.33597836589384, + "learning_rate": 2.7966929060807923e-06, + "loss": 0.5683, + "step": 4669 + }, + { + "epoch": 0.19270446480151854, + "grad_norm": 4.711056312935485, + "learning_rate": 2.7965921155573417e-06, + "loss": 0.5712, + "step": 4670 + }, + { + "epoch": 0.19274572914087645, + "grad_norm": 2.7387214249669154, + "learning_rate": 2.796491301873512e-06, + "loss": 0.589, + "step": 4671 + }, + { + "epoch": 0.19278699348023437, + "grad_norm": 2.8384744847286685, + "learning_rate": 2.7963904650311044e-06, + "loss": 0.5604, + "step": 4672 + }, + { + "epoch": 0.1928282578195923, + "grad_norm": 6.482785769516655, + "learning_rate": 2.79628960503192e-06, + "loss": 0.6295, + "step": 4673 + }, + { + "epoch": 0.19286952215895023, + "grad_norm": 10.821844685007639, + "learning_rate": 2.7961887218777604e-06, + "loss": 0.5776, + "step": 4674 + }, + { + "epoch": 0.19291078649830817, + "grad_norm": 4.024817238930693, + "learning_rate": 2.7960878155704277e-06, + "loss": 0.5701, + "step": 4675 + }, + { + "epoch": 0.1929520508376661, + "grad_norm": 2.8673639707221747, + "learning_rate": 2.7959868861117246e-06, + "loss": 0.5654, + "step": 4676 + }, + { + "epoch": 0.192993315177024, + "grad_norm": 2.8783087938452354, + "learning_rate": 2.7958859335034534e-06, + "loss": 0.5237, + "step": 4677 + }, + { + "epoch": 0.19303457951638195, + "grad_norm": 3.91413225151599, + "learning_rate": 2.7957849577474176e-06, + "loss": 0.5869, + "step": 4678 + }, + { + "epoch": 0.19307584385573986, + "grad_norm": 4.7523970237057, + "learning_rate": 2.7956839588454213e-06, + "loss": 0.5753, + "step": 4679 + }, + { + "epoch": 0.1931171081950978, + "grad_norm": 4.231997475266554, + "learning_rate": 2.7955829367992674e-06, + "loss": 0.5808, + "step": 4680 + }, + { + "epoch": 0.19315837253445572, + "grad_norm": 2.5012098555956683, + "learning_rate": 2.795481891610762e-06, + "loss": 0.5208, + "step": 4681 + }, + { + "epoch": 0.19319963687381364, + "grad_norm": 5.7267847842763455, + "learning_rate": 2.7953808232817087e-06, + "loss": 0.5765, + "step": 4682 + }, + { + "epoch": 0.19324090121317158, + "grad_norm": 2.5654123218119778, + "learning_rate": 2.795279731813913e-06, + "loss": 0.513, + "step": 4683 + }, + { + "epoch": 0.1932821655525295, + "grad_norm": 3.3870929188016596, + "learning_rate": 2.795178617209182e-06, + "loss": 0.5623, + "step": 4684 + }, + { + "epoch": 0.19332342989188744, + "grad_norm": 2.964293782881274, + "learning_rate": 2.7950774794693205e-06, + "loss": 0.5247, + "step": 4685 + }, + { + "epoch": 0.19336469423124536, + "grad_norm": 3.1554137120280834, + "learning_rate": 2.794976318596135e-06, + "loss": 0.5637, + "step": 4686 + }, + { + "epoch": 
0.19340595857060328, + "grad_norm": 8.482750452311981, + "learning_rate": 2.794875134591433e-06, + "loss": 0.5732, + "step": 4687 + }, + { + "epoch": 0.19344722290996122, + "grad_norm": 4.991461581758016, + "learning_rate": 2.794773927457022e-06, + "loss": 0.6057, + "step": 4688 + }, + { + "epoch": 0.19348848724931914, + "grad_norm": 3.9524507787441223, + "learning_rate": 2.7946726971947096e-06, + "loss": 0.5257, + "step": 4689 + }, + { + "epoch": 0.19352975158867705, + "grad_norm": 2.4350054481097962, + "learning_rate": 2.7945714438063044e-06, + "loss": 0.6228, + "step": 4690 + }, + { + "epoch": 0.193571015928035, + "grad_norm": 2.80178502290512, + "learning_rate": 2.794470167293614e-06, + "loss": 0.5289, + "step": 4691 + }, + { + "epoch": 0.1936122802673929, + "grad_norm": 13.591850654665926, + "learning_rate": 2.794368867658449e-06, + "loss": 0.5133, + "step": 4692 + }, + { + "epoch": 0.19365354460675085, + "grad_norm": 4.016201665559242, + "learning_rate": 2.794267544902617e-06, + "loss": 0.5446, + "step": 4693 + }, + { + "epoch": 0.19369480894610877, + "grad_norm": 2.028274975387261, + "learning_rate": 2.7941661990279294e-06, + "loss": 0.5322, + "step": 4694 + }, + { + "epoch": 0.1937360732854667, + "grad_norm": 3.13213330004178, + "learning_rate": 2.794064830036196e-06, + "loss": 0.5885, + "step": 4695 + }, + { + "epoch": 0.19377733762482463, + "grad_norm": 3.1026887437611603, + "learning_rate": 2.793963437929228e-06, + "loss": 0.6016, + "step": 4696 + }, + { + "epoch": 0.19381860196418255, + "grad_norm": 3.184080702481117, + "learning_rate": 2.7938620227088354e-06, + "loss": 0.5559, + "step": 4697 + }, + { + "epoch": 0.1938598663035405, + "grad_norm": 12.323390622357827, + "learning_rate": 2.79376058437683e-06, + "loss": 0.586, + "step": 4698 + }, + { + "epoch": 0.1939011306428984, + "grad_norm": 5.865630700459472, + "learning_rate": 2.7936591229350246e-06, + "loss": 0.4762, + "step": 4699 + }, + { + "epoch": 0.19394239498225632, + "grad_norm": 5.6314294214335385, + "learning_rate": 2.7935576383852307e-06, + "loss": 0.534, + "step": 4700 + }, + { + "epoch": 0.19398365932161427, + "grad_norm": 3.469602675215933, + "learning_rate": 2.793456130729262e-06, + "loss": 0.5731, + "step": 4701 + }, + { + "epoch": 0.19402492366097218, + "grad_norm": 4.010615028987521, + "learning_rate": 2.7933545999689303e-06, + "loss": 0.5667, + "step": 4702 + }, + { + "epoch": 0.19406618800033013, + "grad_norm": 3.302623712087456, + "learning_rate": 2.7932530461060508e-06, + "loss": 0.5051, + "step": 4703 + }, + { + "epoch": 0.19410745233968804, + "grad_norm": 6.861795666669608, + "learning_rate": 2.793151469142436e-06, + "loss": 0.6001, + "step": 4704 + }, + { + "epoch": 0.19414871667904596, + "grad_norm": 3.498282347965951, + "learning_rate": 2.793049869079901e-06, + "loss": 0.5534, + "step": 4705 + }, + { + "epoch": 0.1941899810184039, + "grad_norm": 2.0091537198421423, + "learning_rate": 2.7929482459202606e-06, + "loss": 0.5374, + "step": 4706 + }, + { + "epoch": 0.19423124535776182, + "grad_norm": 2.251269428125647, + "learning_rate": 2.7928465996653307e-06, + "loss": 0.5767, + "step": 4707 + }, + { + "epoch": 0.19427250969711976, + "grad_norm": 2.560197100007835, + "learning_rate": 2.792744930316926e-06, + "loss": 0.539, + "step": 4708 + }, + { + "epoch": 0.19431377403647768, + "grad_norm": 6.204182457141574, + "learning_rate": 2.7926432378768627e-06, + "loss": 0.6068, + "step": 4709 + }, + { + "epoch": 0.1943550383758356, + "grad_norm": 2.1441271517698124, + "learning_rate": 2.792541522346958e-06, + 
"loss": 0.545, + "step": 4710 + }, + { + "epoch": 0.19439630271519354, + "grad_norm": 2.512377892683494, + "learning_rate": 2.7924397837290274e-06, + "loss": 0.5487, + "step": 4711 + }, + { + "epoch": 0.19443756705455145, + "grad_norm": 8.181625456388291, + "learning_rate": 2.7923380220248896e-06, + "loss": 0.6055, + "step": 4712 + }, + { + "epoch": 0.1944788313939094, + "grad_norm": 3.0320191564149344, + "learning_rate": 2.7922362372363613e-06, + "loss": 0.5585, + "step": 4713 + }, + { + "epoch": 0.1945200957332673, + "grad_norm": 29.34399923464854, + "learning_rate": 2.7921344293652615e-06, + "loss": 0.5999, + "step": 4714 + }, + { + "epoch": 0.19456136007262523, + "grad_norm": 3.9082657622689814, + "learning_rate": 2.792032598413409e-06, + "loss": 0.5629, + "step": 4715 + }, + { + "epoch": 0.19460262441198317, + "grad_norm": 3.6748790441188017, + "learning_rate": 2.791930744382621e-06, + "loss": 0.5451, + "step": 4716 + }, + { + "epoch": 0.1946438887513411, + "grad_norm": 3.1811995637179056, + "learning_rate": 2.7918288672747185e-06, + "loss": 0.5554, + "step": 4717 + }, + { + "epoch": 0.19468515309069903, + "grad_norm": 2.5478372825764297, + "learning_rate": 2.791726967091521e-06, + "loss": 0.5531, + "step": 4718 + }, + { + "epoch": 0.19472641743005695, + "grad_norm": 4.155898832470874, + "learning_rate": 2.7916250438348484e-06, + "loss": 0.5584, + "step": 4719 + }, + { + "epoch": 0.19476768176941486, + "grad_norm": 3.6207264310843614, + "learning_rate": 2.7915230975065207e-06, + "loss": 0.5459, + "step": 4720 + }, + { + "epoch": 0.1948089461087728, + "grad_norm": 3.921223590932445, + "learning_rate": 2.79142112810836e-06, + "loss": 0.502, + "step": 4721 + }, + { + "epoch": 0.19485021044813072, + "grad_norm": 2.4068474830681224, + "learning_rate": 2.7913191356421877e-06, + "loss": 0.5381, + "step": 4722 + }, + { + "epoch": 0.19489147478748864, + "grad_norm": 1.9672342878550542, + "learning_rate": 2.791217120109825e-06, + "loss": 0.5853, + "step": 4723 + }, + { + "epoch": 0.19493273912684658, + "grad_norm": 2.3317977238899865, + "learning_rate": 2.7911150815130942e-06, + "loss": 0.545, + "step": 4724 + }, + { + "epoch": 0.1949740034662045, + "grad_norm": 12.407034489369265, + "learning_rate": 2.791013019853818e-06, + "loss": 0.5605, + "step": 4725 + }, + { + "epoch": 0.19501526780556244, + "grad_norm": 2.344746756612411, + "learning_rate": 2.7909109351338204e-06, + "loss": 0.5297, + "step": 4726 + }, + { + "epoch": 0.19505653214492036, + "grad_norm": 4.713476395782854, + "learning_rate": 2.7908088273549233e-06, + "loss": 0.5974, + "step": 4727 + }, + { + "epoch": 0.19509779648427827, + "grad_norm": 2.6210359097496574, + "learning_rate": 2.7907066965189516e-06, + "loss": 0.5849, + "step": 4728 + }, + { + "epoch": 0.19513906082363622, + "grad_norm": 2.9779132098705636, + "learning_rate": 2.79060454262773e-06, + "loss": 0.5207, + "step": 4729 + }, + { + "epoch": 0.19518032516299413, + "grad_norm": 2.8537086353675036, + "learning_rate": 2.7905023656830827e-06, + "loss": 0.5638, + "step": 4730 + }, + { + "epoch": 0.19522158950235208, + "grad_norm": 3.284667849458153, + "learning_rate": 2.790400165686834e-06, + "loss": 0.5813, + "step": 4731 + }, + { + "epoch": 0.19526285384171, + "grad_norm": 4.9501247260965, + "learning_rate": 2.7902979426408106e-06, + "loss": 0.568, + "step": 4732 + }, + { + "epoch": 0.1953041181810679, + "grad_norm": 120.52544298154018, + "learning_rate": 2.7901956965468386e-06, + "loss": 0.584, + "step": 4733 + }, + { + "epoch": 0.19534538252042585, + "grad_norm": 
7.6031275525686315, + "learning_rate": 2.7900934274067437e-06, + "loss": 0.5533, + "step": 4734 + }, + { + "epoch": 0.19538664685978377, + "grad_norm": 3.868191668047631, + "learning_rate": 2.7899911352223526e-06, + "loss": 0.5072, + "step": 4735 + }, + { + "epoch": 0.1954279111991417, + "grad_norm": 4.3861459683451445, + "learning_rate": 2.789888819995493e-06, + "loss": 0.5085, + "step": 4736 + }, + { + "epoch": 0.19546917553849963, + "grad_norm": 3.516781506715734, + "learning_rate": 2.7897864817279917e-06, + "loss": 0.5798, + "step": 4737 + }, + { + "epoch": 0.19551043987785754, + "grad_norm": 3.289635600561637, + "learning_rate": 2.7896841204216777e-06, + "loss": 0.5661, + "step": 4738 + }, + { + "epoch": 0.1955517042172155, + "grad_norm": 3.004318427997923, + "learning_rate": 2.7895817360783787e-06, + "loss": 0.5287, + "step": 4739 + }, + { + "epoch": 0.1955929685565734, + "grad_norm": 8.926080503941012, + "learning_rate": 2.7894793286999246e-06, + "loss": 0.5516, + "step": 4740 + }, + { + "epoch": 0.19563423289593135, + "grad_norm": 3.3921024868090623, + "learning_rate": 2.7893768982881434e-06, + "loss": 0.566, + "step": 4741 + }, + { + "epoch": 0.19567549723528926, + "grad_norm": 5.582809103442459, + "learning_rate": 2.789274444844865e-06, + "loss": 0.5651, + "step": 4742 + }, + { + "epoch": 0.19571676157464718, + "grad_norm": 2.317532849187095, + "learning_rate": 2.7891719683719206e-06, + "loss": 0.5667, + "step": 4743 + }, + { + "epoch": 0.19575802591400512, + "grad_norm": 8.21891125610034, + "learning_rate": 2.789069468871139e-06, + "loss": 0.5229, + "step": 4744 + }, + { + "epoch": 0.19579929025336304, + "grad_norm": 3.0311657446543196, + "learning_rate": 2.788966946344352e-06, + "loss": 0.5513, + "step": 4745 + }, + { + "epoch": 0.19584055459272098, + "grad_norm": 3.0242758089554607, + "learning_rate": 2.7888644007933913e-06, + "loss": 0.5559, + "step": 4746 + }, + { + "epoch": 0.1958818189320789, + "grad_norm": 2.057225889702327, + "learning_rate": 2.788761832220088e-06, + "loss": 0.4951, + "step": 4747 + }, + { + "epoch": 0.19592308327143682, + "grad_norm": 4.464997182988909, + "learning_rate": 2.7886592406262746e-06, + "loss": 0.5986, + "step": 4748 + }, + { + "epoch": 0.19596434761079476, + "grad_norm": 4.322940239343022, + "learning_rate": 2.7885566260137828e-06, + "loss": 0.5661, + "step": 4749 + }, + { + "epoch": 0.19600561195015268, + "grad_norm": 3.4984758683396935, + "learning_rate": 2.7884539883844463e-06, + "loss": 0.5753, + "step": 4750 + }, + { + "epoch": 0.19604687628951062, + "grad_norm": 3.585506363739119, + "learning_rate": 2.788351327740099e-06, + "loss": 0.555, + "step": 4751 + }, + { + "epoch": 0.19608814062886853, + "grad_norm": 4.881899132651207, + "learning_rate": 2.7882486440825735e-06, + "loss": 0.5916, + "step": 4752 + }, + { + "epoch": 0.19612940496822645, + "grad_norm": 2.9077666112444556, + "learning_rate": 2.7881459374137046e-06, + "loss": 0.5567, + "step": 4753 + }, + { + "epoch": 0.1961706693075844, + "grad_norm": 5.048032626098667, + "learning_rate": 2.788043207735327e-06, + "loss": 0.611, + "step": 4754 + }, + { + "epoch": 0.1962119336469423, + "grad_norm": 2.6443812126038013, + "learning_rate": 2.7879404550492752e-06, + "loss": 0.5147, + "step": 4755 + }, + { + "epoch": 0.19625319798630023, + "grad_norm": 2.411486993515299, + "learning_rate": 2.787837679357385e-06, + "loss": 0.5518, + "step": 4756 + }, + { + "epoch": 0.19629446232565817, + "grad_norm": 3.4926533631171894, + "learning_rate": 2.7877348806614924e-06, + "loss": 0.6221, + "step": 
4757 + }, + { + "epoch": 0.19633572666501609, + "grad_norm": 3.3731489857257477, + "learning_rate": 2.7876320589634333e-06, + "loss": 0.5055, + "step": 4758 + }, + { + "epoch": 0.19637699100437403, + "grad_norm": 8.153063531917644, + "learning_rate": 2.787529214265044e-06, + "loss": 0.4916, + "step": 4759 + }, + { + "epoch": 0.19641825534373195, + "grad_norm": 3.0717617082416093, + "learning_rate": 2.7874263465681624e-06, + "loss": 0.5985, + "step": 4760 + }, + { + "epoch": 0.19645951968308986, + "grad_norm": 3.1510478693721735, + "learning_rate": 2.787323455874626e-06, + "loss": 0.6271, + "step": 4761 + }, + { + "epoch": 0.1965007840224478, + "grad_norm": 2.9789120070748143, + "learning_rate": 2.7872205421862722e-06, + "loss": 0.5934, + "step": 4762 + }, + { + "epoch": 0.19654204836180572, + "grad_norm": 4.22025905523061, + "learning_rate": 2.787117605504939e-06, + "loss": 0.6047, + "step": 4763 + }, + { + "epoch": 0.19658331270116366, + "grad_norm": 2.7388990082091755, + "learning_rate": 2.7870146458324655e-06, + "loss": 0.5323, + "step": 4764 + }, + { + "epoch": 0.19662457704052158, + "grad_norm": 5.140561912694901, + "learning_rate": 2.7869116631706907e-06, + "loss": 0.608, + "step": 4765 + }, + { + "epoch": 0.1966658413798795, + "grad_norm": 2.0114821392732614, + "learning_rate": 2.7868086575214548e-06, + "loss": 0.5255, + "step": 4766 + }, + { + "epoch": 0.19670710571923744, + "grad_norm": 5.397190848824113, + "learning_rate": 2.7867056288865966e-06, + "loss": 0.5668, + "step": 4767 + }, + { + "epoch": 0.19674837005859536, + "grad_norm": 2.643759871724105, + "learning_rate": 2.7866025772679572e-06, + "loss": 0.5719, + "step": 4768 + }, + { + "epoch": 0.1967896343979533, + "grad_norm": 2.842004978550789, + "learning_rate": 2.7864995026673777e-06, + "loss": 0.5242, + "step": 4769 + }, + { + "epoch": 0.19683089873731122, + "grad_norm": 2.9088799320184533, + "learning_rate": 2.7863964050866977e-06, + "loss": 0.5061, + "step": 4770 + }, + { + "epoch": 0.19687216307666913, + "grad_norm": 2.669622523058138, + "learning_rate": 2.7862932845277605e-06, + "loss": 0.5601, + "step": 4771 + }, + { + "epoch": 0.19691342741602708, + "grad_norm": 9.3159378486984, + "learning_rate": 2.786190140992408e-06, + "loss": 0.5653, + "step": 4772 + }, + { + "epoch": 0.196954691755385, + "grad_norm": 3.4709392841493543, + "learning_rate": 2.7860869744824813e-06, + "loss": 0.5807, + "step": 4773 + }, + { + "epoch": 0.19699595609474294, + "grad_norm": 2.7090614351177553, + "learning_rate": 2.785983784999824e-06, + "loss": 0.5461, + "step": 4774 + }, + { + "epoch": 0.19703722043410085, + "grad_norm": 2.1004662403878607, + "learning_rate": 2.785880572546279e-06, + "loss": 0.5348, + "step": 4775 + }, + { + "epoch": 0.19707848477345877, + "grad_norm": 2.610875220142426, + "learning_rate": 2.785777337123691e-06, + "loss": 0.5124, + "step": 4776 + }, + { + "epoch": 0.1971197491128167, + "grad_norm": 2.4168014698457174, + "learning_rate": 2.785674078733902e-06, + "loss": 0.534, + "step": 4777 + }, + { + "epoch": 0.19716101345217463, + "grad_norm": 2.8631179960632585, + "learning_rate": 2.7855707973787587e-06, + "loss": 0.5154, + "step": 4778 + }, + { + "epoch": 0.19720227779153257, + "grad_norm": 10.141671217623617, + "learning_rate": 2.7854674930601046e-06, + "loss": 0.5986, + "step": 4779 + }, + { + "epoch": 0.1972435421308905, + "grad_norm": 2.7292118044036204, + "learning_rate": 2.7853641657797857e-06, + "loss": 0.5857, + "step": 4780 + }, + { + "epoch": 0.1972848064702484, + "grad_norm": 2.5531042696928745, + 
"learning_rate": 2.7852608155396465e-06, + "loss": 0.5407, + "step": 4781 + }, + { + "epoch": 0.19732607080960635, + "grad_norm": 3.0154549273467723, + "learning_rate": 2.7851574423415343e-06, + "loss": 0.5135, + "step": 4782 + }, + { + "epoch": 0.19736733514896426, + "grad_norm": 3.1646473683066607, + "learning_rate": 2.785054046187296e-06, + "loss": 0.602, + "step": 4783 + }, + { + "epoch": 0.19740859948832218, + "grad_norm": 2.8191389957178887, + "learning_rate": 2.784950627078777e-06, + "loss": 0.5663, + "step": 4784 + }, + { + "epoch": 0.19744986382768012, + "grad_norm": 2.574323138675318, + "learning_rate": 2.784847185017825e-06, + "loss": 0.6137, + "step": 4785 + }, + { + "epoch": 0.19749112816703804, + "grad_norm": 3.2052259529027904, + "learning_rate": 2.7847437200062888e-06, + "loss": 0.5475, + "step": 4786 + }, + { + "epoch": 0.19753239250639598, + "grad_norm": 4.4233929978964355, + "learning_rate": 2.7846402320460153e-06, + "loss": 0.536, + "step": 4787 + }, + { + "epoch": 0.1975736568457539, + "grad_norm": 2.8728630053168467, + "learning_rate": 2.784536721138854e-06, + "loss": 0.5973, + "step": 4788 + }, + { + "epoch": 0.1976149211851118, + "grad_norm": 3.804425173866229, + "learning_rate": 2.784433187286653e-06, + "loss": 0.5676, + "step": 4789 + }, + { + "epoch": 0.19765618552446976, + "grad_norm": 2.4279318779969388, + "learning_rate": 2.7843296304912625e-06, + "loss": 0.5721, + "step": 4790 + }, + { + "epoch": 0.19769744986382767, + "grad_norm": 2.887820578629831, + "learning_rate": 2.7842260507545312e-06, + "loss": 0.4794, + "step": 4791 + }, + { + "epoch": 0.19773871420318562, + "grad_norm": 3.309100903728444, + "learning_rate": 2.7841224480783106e-06, + "loss": 0.5365, + "step": 4792 + }, + { + "epoch": 0.19777997854254353, + "grad_norm": 3.4698648137054584, + "learning_rate": 2.784018822464451e-06, + "loss": 0.6065, + "step": 4793 + }, + { + "epoch": 0.19782124288190145, + "grad_norm": 5.49327432644734, + "learning_rate": 2.7839151739148024e-06, + "loss": 0.5572, + "step": 4794 + }, + { + "epoch": 0.1978625072212594, + "grad_norm": 2.580612973705017, + "learning_rate": 2.783811502431217e-06, + "loss": 0.515, + "step": 4795 + }, + { + "epoch": 0.1979037715606173, + "grad_norm": 2.647271526278341, + "learning_rate": 2.7837078080155474e-06, + "loss": 0.5661, + "step": 4796 + }, + { + "epoch": 0.19794503589997525, + "grad_norm": 4.13784809491962, + "learning_rate": 2.7836040906696445e-06, + "loss": 0.5827, + "step": 4797 + }, + { + "epoch": 0.19798630023933317, + "grad_norm": 2.747845291550498, + "learning_rate": 2.7835003503953614e-06, + "loss": 0.5963, + "step": 4798 + }, + { + "epoch": 0.19802756457869108, + "grad_norm": 4.5206407785863805, + "learning_rate": 2.783396587194551e-06, + "loss": 0.5536, + "step": 4799 + }, + { + "epoch": 0.19806882891804903, + "grad_norm": 11.956447786437183, + "learning_rate": 2.783292801069067e-06, + "loss": 0.5882, + "step": 4800 + }, + { + "epoch": 0.19811009325740694, + "grad_norm": 1.6808574096907025, + "learning_rate": 2.7831889920207635e-06, + "loss": 0.5101, + "step": 4801 + }, + { + "epoch": 0.1981513575967649, + "grad_norm": 4.3007220699070645, + "learning_rate": 2.7830851600514946e-06, + "loss": 0.531, + "step": 4802 + }, + { + "epoch": 0.1981926219361228, + "grad_norm": 2.2331047086933795, + "learning_rate": 2.782981305163115e-06, + "loss": 0.508, + "step": 4803 + }, + { + "epoch": 0.19823388627548072, + "grad_norm": 2.342049571654226, + "learning_rate": 2.7828774273574797e-06, + "loss": 0.5781, + "step": 4804 + }, + { + 
"epoch": 0.19827515061483866, + "grad_norm": 2.8852822084894894, + "learning_rate": 2.7827735266364438e-06, + "loss": 0.5404, + "step": 4805 + }, + { + "epoch": 0.19831641495419658, + "grad_norm": 2.839546825868918, + "learning_rate": 2.7826696030018643e-06, + "loss": 0.6034, + "step": 4806 + }, + { + "epoch": 0.19835767929355452, + "grad_norm": 3.4953757928811044, + "learning_rate": 2.7825656564555963e-06, + "loss": 0.6172, + "step": 4807 + }, + { + "epoch": 0.19839894363291244, + "grad_norm": 2.6124840557996074, + "learning_rate": 2.782461686999498e-06, + "loss": 0.5519, + "step": 4808 + }, + { + "epoch": 0.19844020797227035, + "grad_norm": 2.4403435517143444, + "learning_rate": 2.7823576946354254e-06, + "loss": 0.5007, + "step": 4809 + }, + { + "epoch": 0.1984814723116283, + "grad_norm": 2.1108580073796936, + "learning_rate": 2.782253679365236e-06, + "loss": 0.5433, + "step": 4810 + }, + { + "epoch": 0.19852273665098621, + "grad_norm": 1.8059953202432806, + "learning_rate": 2.7821496411907886e-06, + "loss": 0.5405, + "step": 4811 + }, + { + "epoch": 0.19856400099034416, + "grad_norm": 6.283965686903589, + "learning_rate": 2.782045580113941e-06, + "loss": 0.5427, + "step": 4812 + }, + { + "epoch": 0.19860526532970207, + "grad_norm": 3.255727815730607, + "learning_rate": 2.7819414961365525e-06, + "loss": 0.5452, + "step": 4813 + }, + { + "epoch": 0.19864652966906, + "grad_norm": 2.2829156937290587, + "learning_rate": 2.781837389260482e-06, + "loss": 0.587, + "step": 4814 + }, + { + "epoch": 0.19868779400841793, + "grad_norm": 2.299066658237065, + "learning_rate": 2.781733259487589e-06, + "loss": 0.5623, + "step": 4815 + }, + { + "epoch": 0.19872905834777585, + "grad_norm": 1.795703213244824, + "learning_rate": 2.7816291068197328e-06, + "loss": 0.4946, + "step": 4816 + }, + { + "epoch": 0.19877032268713377, + "grad_norm": 4.093149996649982, + "learning_rate": 2.7815249312587752e-06, + "loss": 0.5846, + "step": 4817 + }, + { + "epoch": 0.1988115870264917, + "grad_norm": 3.080455473487115, + "learning_rate": 2.7814207328065765e-06, + "loss": 0.5961, + "step": 4818 + }, + { + "epoch": 0.19885285136584963, + "grad_norm": 2.690160661756288, + "learning_rate": 2.7813165114649976e-06, + "loss": 0.5592, + "step": 4819 + }, + { + "epoch": 0.19889411570520757, + "grad_norm": 4.361529673153065, + "learning_rate": 2.7812122672359007e-06, + "loss": 0.5257, + "step": 4820 + }, + { + "epoch": 0.19893538004456549, + "grad_norm": 2.519718785273034, + "learning_rate": 2.781108000121148e-06, + "loss": 0.5822, + "step": 4821 + }, + { + "epoch": 0.1989766443839234, + "grad_norm": 2.68920202791092, + "learning_rate": 2.7810037101226007e-06, + "loss": 0.5614, + "step": 4822 + }, + { + "epoch": 0.19901790872328134, + "grad_norm": 2.310369624913637, + "learning_rate": 2.7808993972421233e-06, + "loss": 0.5838, + "step": 4823 + }, + { + "epoch": 0.19905917306263926, + "grad_norm": 8.566970502782521, + "learning_rate": 2.780795061481578e-06, + "loss": 0.5371, + "step": 4824 + }, + { + "epoch": 0.1991004374019972, + "grad_norm": 3.00660992124898, + "learning_rate": 2.780690702842829e-06, + "loss": 0.4906, + "step": 4825 + }, + { + "epoch": 0.19914170174135512, + "grad_norm": 2.2329853753799327, + "learning_rate": 2.78058632132774e-06, + "loss": 0.5383, + "step": 4826 + }, + { + "epoch": 0.19918296608071304, + "grad_norm": 3.3758737827830836, + "learning_rate": 2.7804819169381757e-06, + "loss": 0.5769, + "step": 4827 + }, + { + "epoch": 0.19922423042007098, + "grad_norm": 3.3768770349914687, + "learning_rate": 
2.7803774896760014e-06, + "loss": 0.5636, + "step": 4828 + }, + { + "epoch": 0.1992654947594289, + "grad_norm": 2.7138472812743464, + "learning_rate": 2.780273039543082e-06, + "loss": 0.6071, + "step": 4829 + }, + { + "epoch": 0.19930675909878684, + "grad_norm": 3.4661089944015884, + "learning_rate": 2.780168566541283e-06, + "loss": 0.521, + "step": 4830 + }, + { + "epoch": 0.19934802343814476, + "grad_norm": 2.3100798805270992, + "learning_rate": 2.7800640706724715e-06, + "loss": 0.5745, + "step": 4831 + }, + { + "epoch": 0.19938928777750267, + "grad_norm": 2.517608212731553, + "learning_rate": 2.7799595519385133e-06, + "loss": 0.5492, + "step": 4832 + }, + { + "epoch": 0.19943055211686062, + "grad_norm": 2.5059882204325548, + "learning_rate": 2.7798550103412754e-06, + "loss": 0.6065, + "step": 4833 + }, + { + "epoch": 0.19947181645621853, + "grad_norm": 4.204367014516827, + "learning_rate": 2.7797504458826254e-06, + "loss": 0.5967, + "step": 4834 + }, + { + "epoch": 0.19951308079557648, + "grad_norm": 3.4009681260005196, + "learning_rate": 2.7796458585644307e-06, + "loss": 0.5615, + "step": 4835 + }, + { + "epoch": 0.1995543451349344, + "grad_norm": 3.706538156940226, + "learning_rate": 2.7795412483885606e-06, + "loss": 0.5724, + "step": 4836 + }, + { + "epoch": 0.1995956094742923, + "grad_norm": 7.649605817639463, + "learning_rate": 2.779436615356882e-06, + "loss": 0.6078, + "step": 4837 + }, + { + "epoch": 0.19963687381365025, + "grad_norm": 4.262727844575249, + "learning_rate": 2.779331959471265e-06, + "loss": 0.6027, + "step": 4838 + }, + { + "epoch": 0.19967813815300817, + "grad_norm": 4.069424072487063, + "learning_rate": 2.779227280733579e-06, + "loss": 0.5973, + "step": 4839 + }, + { + "epoch": 0.1997194024923661, + "grad_norm": 5.258179919781212, + "learning_rate": 2.7791225791456935e-06, + "loss": 0.5493, + "step": 4840 + }, + { + "epoch": 0.19976066683172403, + "grad_norm": 4.061987710737032, + "learning_rate": 2.779017854709479e-06, + "loss": 0.545, + "step": 4841 + }, + { + "epoch": 0.19980193117108194, + "grad_norm": 2.1700195545241945, + "learning_rate": 2.778913107426806e-06, + "loss": 0.5272, + "step": 4842 + }, + { + "epoch": 0.19984319551043989, + "grad_norm": 4.562606828485601, + "learning_rate": 2.778808337299545e-06, + "loss": 0.5757, + "step": 4843 + }, + { + "epoch": 0.1998844598497978, + "grad_norm": 4.849968139296379, + "learning_rate": 2.778703544329569e-06, + "loss": 0.5771, + "step": 4844 + }, + { + "epoch": 0.19992572418915572, + "grad_norm": 2.3373724636314304, + "learning_rate": 2.7785987285187484e-06, + "loss": 0.5411, + "step": 4845 + }, + { + "epoch": 0.19996698852851366, + "grad_norm": 6.3409136231269025, + "learning_rate": 2.7784938898689558e-06, + "loss": 0.5263, + "step": 4846 + }, + { + "epoch": 0.20000825286787158, + "grad_norm": 3.0054677533773786, + "learning_rate": 2.778389028382064e-06, + "loss": 0.6027, + "step": 4847 + }, + { + "epoch": 0.20004951720722952, + "grad_norm": 4.807195315058068, + "learning_rate": 2.7782841440599464e-06, + "loss": 0.5462, + "step": 4848 + }, + { + "epoch": 0.20009078154658744, + "grad_norm": 2.7508137456571196, + "learning_rate": 2.7781792369044758e-06, + "loss": 0.5657, + "step": 4849 + }, + { + "epoch": 0.20013204588594535, + "grad_norm": 3.7939597318531213, + "learning_rate": 2.778074306917527e-06, + "loss": 0.6131, + "step": 4850 + }, + { + "epoch": 0.2001733102253033, + "grad_norm": 4.019615196013512, + "learning_rate": 2.7779693541009734e-06, + "loss": 0.5334, + "step": 4851 + }, + { + "epoch": 
0.2002145745646612, + "grad_norm": 2.3793360162808455, + "learning_rate": 2.77786437845669e-06, + "loss": 0.5374, + "step": 4852 + }, + { + "epoch": 0.20025583890401916, + "grad_norm": 2.394696902993444, + "learning_rate": 2.777759379986553e-06, + "loss": 0.4805, + "step": 4853 + }, + { + "epoch": 0.20029710324337707, + "grad_norm": 4.03454207098157, + "learning_rate": 2.777654358692436e-06, + "loss": 0.583, + "step": 4854 + }, + { + "epoch": 0.200338367582735, + "grad_norm": 22.75070129843086, + "learning_rate": 2.7775493145762166e-06, + "loss": 0.5919, + "step": 4855 + }, + { + "epoch": 0.20037963192209293, + "grad_norm": 4.352768430687898, + "learning_rate": 2.7774442476397703e-06, + "loss": 0.542, + "step": 4856 + }, + { + "epoch": 0.20042089626145085, + "grad_norm": 2.936892992739169, + "learning_rate": 2.777339157884974e-06, + "loss": 0.5624, + "step": 4857 + }, + { + "epoch": 0.2004621606008088, + "grad_norm": 2.936405418411701, + "learning_rate": 2.777234045313705e-06, + "loss": 0.4965, + "step": 4858 + }, + { + "epoch": 0.2005034249401667, + "grad_norm": 3.8139142213685067, + "learning_rate": 2.7771289099278405e-06, + "loss": 0.5253, + "step": 4859 + }, + { + "epoch": 0.20054468927952462, + "grad_norm": 6.817129551324412, + "learning_rate": 2.7770237517292595e-06, + "loss": 0.5442, + "step": 4860 + }, + { + "epoch": 0.20058595361888257, + "grad_norm": 3.5018171393135877, + "learning_rate": 2.776918570719839e-06, + "loss": 0.5637, + "step": 4861 + }, + { + "epoch": 0.20062721795824048, + "grad_norm": 4.0953968737341135, + "learning_rate": 2.7768133669014584e-06, + "loss": 0.5286, + "step": 4862 + }, + { + "epoch": 0.20066848229759843, + "grad_norm": 3.8462787748119527, + "learning_rate": 2.776708140275997e-06, + "loss": 0.5162, + "step": 4863 + }, + { + "epoch": 0.20070974663695634, + "grad_norm": 3.251237770127131, + "learning_rate": 2.776602890845335e-06, + "loss": 0.5441, + "step": 4864 + }, + { + "epoch": 0.20075101097631426, + "grad_norm": 5.586179028604614, + "learning_rate": 2.7764976186113516e-06, + "loss": 0.5573, + "step": 4865 + }, + { + "epoch": 0.2007922753156722, + "grad_norm": 5.987352878053823, + "learning_rate": 2.776392323575927e-06, + "loss": 0.5066, + "step": 4866 + }, + { + "epoch": 0.20083353965503012, + "grad_norm": 2.726092927966471, + "learning_rate": 2.7762870057409427e-06, + "loss": 0.5713, + "step": 4867 + }, + { + "epoch": 0.20087480399438806, + "grad_norm": 2.120833240446044, + "learning_rate": 2.7761816651082797e-06, + "loss": 0.5182, + "step": 4868 + }, + { + "epoch": 0.20091606833374598, + "grad_norm": 3.768479150629264, + "learning_rate": 2.77607630167982e-06, + "loss": 0.5341, + "step": 4869 + }, + { + "epoch": 0.2009573326731039, + "grad_norm": 6.178534157964438, + "learning_rate": 2.7759709154574446e-06, + "loss": 0.5599, + "step": 4870 + }, + { + "epoch": 0.20099859701246184, + "grad_norm": 6.870522611662005, + "learning_rate": 2.775865506443037e-06, + "loss": 0.5761, + "step": 4871 + }, + { + "epoch": 0.20103986135181975, + "grad_norm": 3.3804529590441748, + "learning_rate": 2.77576007463848e-06, + "loss": 0.5931, + "step": 4872 + }, + { + "epoch": 0.2010811256911777, + "grad_norm": 3.209469485435278, + "learning_rate": 2.775654620045656e-06, + "loss": 0.5752, + "step": 4873 + }, + { + "epoch": 0.2011223900305356, + "grad_norm": 2.4857492889722366, + "learning_rate": 2.7755491426664494e-06, + "loss": 0.5593, + "step": 4874 + }, + { + "epoch": 0.20116365436989353, + "grad_norm": 2.3467124302189135, + "learning_rate": 2.775443642502745e-06, + 
"loss": 0.5238, + "step": 4875 + }, + { + "epoch": 0.20120491870925147, + "grad_norm": 2.8907037453510873, + "learning_rate": 2.7753381195564255e-06, + "loss": 0.5637, + "step": 4876 + }, + { + "epoch": 0.2012461830486094, + "grad_norm": 2.3012054670517803, + "learning_rate": 2.7752325738293774e-06, + "loss": 0.5782, + "step": 4877 + }, + { + "epoch": 0.2012874473879673, + "grad_norm": 5.513092688421633, + "learning_rate": 2.7751270053234848e-06, + "loss": 0.5191, + "step": 4878 + }, + { + "epoch": 0.20132871172732525, + "grad_norm": 3.829291334628942, + "learning_rate": 2.7750214140406348e-06, + "loss": 0.6121, + "step": 4879 + }, + { + "epoch": 0.20136997606668317, + "grad_norm": 4.583490562259901, + "learning_rate": 2.774915799982712e-06, + "loss": 0.5357, + "step": 4880 + }, + { + "epoch": 0.2014112404060411, + "grad_norm": 6.649276593031029, + "learning_rate": 2.774810163151604e-06, + "loss": 0.5641, + "step": 4881 + }, + { + "epoch": 0.20145250474539902, + "grad_norm": 8.91645089821557, + "learning_rate": 2.7747045035491973e-06, + "loss": 0.5516, + "step": 4882 + }, + { + "epoch": 0.20149376908475694, + "grad_norm": 2.8268065448058075, + "learning_rate": 2.7745988211773792e-06, + "loss": 0.5333, + "step": 4883 + }, + { + "epoch": 0.20153503342411488, + "grad_norm": 2.4215366603599016, + "learning_rate": 2.7744931160380376e-06, + "loss": 0.5048, + "step": 4884 + }, + { + "epoch": 0.2015762977634728, + "grad_norm": 2.2202876399855813, + "learning_rate": 2.7743873881330613e-06, + "loss": 0.4912, + "step": 4885 + }, + { + "epoch": 0.20161756210283074, + "grad_norm": 2.7600424878550234, + "learning_rate": 2.7742816374643376e-06, + "loss": 0.5632, + "step": 4886 + }, + { + "epoch": 0.20165882644218866, + "grad_norm": 13.918778266899066, + "learning_rate": 2.774175864033756e-06, + "loss": 0.5213, + "step": 4887 + }, + { + "epoch": 0.20170009078154658, + "grad_norm": 2.458317743541455, + "learning_rate": 2.7740700678432067e-06, + "loss": 0.5873, + "step": 4888 + }, + { + "epoch": 0.20174135512090452, + "grad_norm": 3.7777387035456123, + "learning_rate": 2.7739642488945777e-06, + "loss": 0.5597, + "step": 4889 + }, + { + "epoch": 0.20178261946026244, + "grad_norm": 2.388690269274121, + "learning_rate": 2.773858407189761e-06, + "loss": 0.5611, + "step": 4890 + }, + { + "epoch": 0.20182388379962038, + "grad_norm": 5.876018489501875, + "learning_rate": 2.7737525427306464e-06, + "loss": 0.551, + "step": 4891 + }, + { + "epoch": 0.2018651481389783, + "grad_norm": 4.457950529822586, + "learning_rate": 2.7736466555191244e-06, + "loss": 0.5228, + "step": 4892 + }, + { + "epoch": 0.2019064124783362, + "grad_norm": 2.069613738552722, + "learning_rate": 2.7735407455570873e-06, + "loss": 0.5554, + "step": 4893 + }, + { + "epoch": 0.20194767681769416, + "grad_norm": 3.3550909526442285, + "learning_rate": 2.773434812846427e-06, + "loss": 0.5212, + "step": 4894 + }, + { + "epoch": 0.20198894115705207, + "grad_norm": 2.7733041427216922, + "learning_rate": 2.7733288573890348e-06, + "loss": 0.5642, + "step": 4895 + }, + { + "epoch": 0.20203020549641001, + "grad_norm": 15.209956233880893, + "learning_rate": 2.7732228791868043e-06, + "loss": 0.5954, + "step": 4896 + }, + { + "epoch": 0.20207146983576793, + "grad_norm": 6.4145455676348595, + "learning_rate": 2.7731168782416274e-06, + "loss": 0.5676, + "step": 4897 + }, + { + "epoch": 0.20211273417512585, + "grad_norm": 1.9509447882225905, + "learning_rate": 2.7730108545553984e-06, + "loss": 0.5151, + "step": 4898 + }, + { + "epoch": 0.2021539985144838, + 
"grad_norm": 2.7465302267322165, + "learning_rate": 2.7729048081300113e-06, + "loss": 0.5809, + "step": 4899 + }, + { + "epoch": 0.2021952628538417, + "grad_norm": 2.76121349199121, + "learning_rate": 2.7727987389673593e-06, + "loss": 0.5304, + "step": 4900 + }, + { + "epoch": 0.20223652719319965, + "grad_norm": 5.08358093591934, + "learning_rate": 2.772692647069339e-06, + "loss": 0.5862, + "step": 4901 + }, + { + "epoch": 0.20227779153255757, + "grad_norm": 3.810766538244959, + "learning_rate": 2.772586532437843e-06, + "loss": 0.5358, + "step": 4902 + }, + { + "epoch": 0.20231905587191548, + "grad_norm": 3.193370456924588, + "learning_rate": 2.7724803950747687e-06, + "loss": 0.6143, + "step": 4903 + }, + { + "epoch": 0.20236032021127343, + "grad_norm": 4.233540363586763, + "learning_rate": 2.772374234982011e-06, + "loss": 0.5522, + "step": 4904 + }, + { + "epoch": 0.20240158455063134, + "grad_norm": 2.314439278019503, + "learning_rate": 2.7722680521614664e-06, + "loss": 0.5405, + "step": 4905 + }, + { + "epoch": 0.20244284888998926, + "grad_norm": 2.8516710595236088, + "learning_rate": 2.7721618466150317e-06, + "loss": 0.5911, + "step": 4906 + }, + { + "epoch": 0.2024841132293472, + "grad_norm": 3.6150665787799574, + "learning_rate": 2.7720556183446046e-06, + "loss": 0.5861, + "step": 4907 + }, + { + "epoch": 0.20252537756870512, + "grad_norm": 3.391270786467112, + "learning_rate": 2.771949367352081e-06, + "loss": 0.5291, + "step": 4908 + }, + { + "epoch": 0.20256664190806306, + "grad_norm": 2.6654393845030606, + "learning_rate": 2.77184309363936e-06, + "loss": 0.4974, + "step": 4909 + }, + { + "epoch": 0.20260790624742098, + "grad_norm": 2.474109440505159, + "learning_rate": 2.77173679720834e-06, + "loss": 0.5853, + "step": 4910 + }, + { + "epoch": 0.2026491705867789, + "grad_norm": 3.1480423012814662, + "learning_rate": 2.771630478060919e-06, + "loss": 0.5573, + "step": 4911 + }, + { + "epoch": 0.20269043492613684, + "grad_norm": 2.281909854009894, + "learning_rate": 2.771524136198997e-06, + "loss": 0.5377, + "step": 4912 + }, + { + "epoch": 0.20273169926549475, + "grad_norm": 2.694421787732973, + "learning_rate": 2.771417771624473e-06, + "loss": 0.5935, + "step": 4913 + }, + { + "epoch": 0.2027729636048527, + "grad_norm": 3.965005498716923, + "learning_rate": 2.771311384339247e-06, + "loss": 0.5922, + "step": 4914 + }, + { + "epoch": 0.2028142279442106, + "grad_norm": 18.829247995407684, + "learning_rate": 2.771204974345219e-06, + "loss": 0.6064, + "step": 4915 + }, + { + "epoch": 0.20285549228356853, + "grad_norm": 3.358765213846884, + "learning_rate": 2.7710985416442902e-06, + "loss": 0.4724, + "step": 4916 + }, + { + "epoch": 0.20289675662292647, + "grad_norm": 2.496788793580791, + "learning_rate": 2.770992086238362e-06, + "loss": 0.5509, + "step": 4917 + }, + { + "epoch": 0.2029380209622844, + "grad_norm": 3.284553810213379, + "learning_rate": 2.7708856081293355e-06, + "loss": 0.5799, + "step": 4918 + }, + { + "epoch": 0.20297928530164233, + "grad_norm": 6.371186882866634, + "learning_rate": 2.7707791073191124e-06, + "loss": 0.5886, + "step": 4919 + }, + { + "epoch": 0.20302054964100025, + "grad_norm": 3.7589371193950853, + "learning_rate": 2.7706725838095955e-06, + "loss": 0.5449, + "step": 4920 + }, + { + "epoch": 0.20306181398035816, + "grad_norm": 3.561482835663878, + "learning_rate": 2.7705660376026874e-06, + "loss": 0.6126, + "step": 4921 + }, + { + "epoch": 0.2031030783197161, + "grad_norm": 2.0871542813940773, + "learning_rate": 2.770459468700292e-06, + "loss": 0.561, + 
"step": 4922 + }, + { + "epoch": 0.20314434265907402, + "grad_norm": 6.932202814797756, + "learning_rate": 2.770352877104312e-06, + "loss": 0.5846, + "step": 4923 + }, + { + "epoch": 0.20318560699843197, + "grad_norm": 9.694579427453078, + "learning_rate": 2.7702462628166516e-06, + "loss": 0.5943, + "step": 4924 + }, + { + "epoch": 0.20322687133778988, + "grad_norm": 85.83535989157077, + "learning_rate": 2.7701396258392153e-06, + "loss": 0.548, + "step": 4925 + }, + { + "epoch": 0.2032681356771478, + "grad_norm": 2.4870062615118953, + "learning_rate": 2.770032966173908e-06, + "loss": 0.5196, + "step": 4926 + }, + { + "epoch": 0.20330940001650574, + "grad_norm": 5.288681312736679, + "learning_rate": 2.7699262838226343e-06, + "loss": 0.5265, + "step": 4927 + }, + { + "epoch": 0.20335066435586366, + "grad_norm": 17.36589149814214, + "learning_rate": 2.769819578787301e-06, + "loss": 0.6108, + "step": 4928 + }, + { + "epoch": 0.2033919286952216, + "grad_norm": 3.019218982422784, + "learning_rate": 2.769712851069813e-06, + "loss": 0.5728, + "step": 4929 + }, + { + "epoch": 0.20343319303457952, + "grad_norm": 2.953182961191959, + "learning_rate": 2.769606100672077e-06, + "loss": 0.5356, + "step": 4930 + }, + { + "epoch": 0.20347445737393743, + "grad_norm": 11.11518264698131, + "learning_rate": 2.7694993275960005e-06, + "loss": 0.5402, + "step": 4931 + }, + { + "epoch": 0.20351572171329538, + "grad_norm": 5.433607731954455, + "learning_rate": 2.7693925318434896e-06, + "loss": 0.5059, + "step": 4932 + }, + { + "epoch": 0.2035569860526533, + "grad_norm": 2.56952296127044, + "learning_rate": 2.769285713416453e-06, + "loss": 0.5132, + "step": 4933 + }, + { + "epoch": 0.20359825039201124, + "grad_norm": 7.56681377117071, + "learning_rate": 2.769178872316798e-06, + "loss": 0.6061, + "step": 4934 + }, + { + "epoch": 0.20363951473136915, + "grad_norm": 2.9965110849455376, + "learning_rate": 2.7690720085464336e-06, + "loss": 0.5716, + "step": 4935 + }, + { + "epoch": 0.20368077907072707, + "grad_norm": 2.249642851819775, + "learning_rate": 2.7689651221072682e-06, + "loss": 0.5535, + "step": 4936 + }, + { + "epoch": 0.203722043410085, + "grad_norm": 4.741789496372629, + "learning_rate": 2.7688582130012115e-06, + "loss": 0.4817, + "step": 4937 + }, + { + "epoch": 0.20376330774944293, + "grad_norm": 2.872002830198883, + "learning_rate": 2.768751281230173e-06, + "loss": 0.6105, + "step": 4938 + }, + { + "epoch": 0.20380457208880084, + "grad_norm": 3.968792345548804, + "learning_rate": 2.7686443267960624e-06, + "loss": 0.5155, + "step": 4939 + }, + { + "epoch": 0.2038458364281588, + "grad_norm": 2.7802848579586255, + "learning_rate": 2.7685373497007903e-06, + "loss": 0.5431, + "step": 4940 + }, + { + "epoch": 0.2038871007675167, + "grad_norm": 3.386693702730042, + "learning_rate": 2.7684303499462683e-06, + "loss": 0.5674, + "step": 4941 + }, + { + "epoch": 0.20392836510687465, + "grad_norm": 75.43591116088344, + "learning_rate": 2.7683233275344065e-06, + "loss": 0.5866, + "step": 4942 + }, + { + "epoch": 0.20396962944623256, + "grad_norm": 2.927630351704785, + "learning_rate": 2.7682162824671175e-06, + "loss": 0.5602, + "step": 4943 + }, + { + "epoch": 0.20401089378559048, + "grad_norm": 2.37724500491965, + "learning_rate": 2.768109214746313e-06, + "loss": 0.5233, + "step": 4944 + }, + { + "epoch": 0.20405215812494842, + "grad_norm": 3.5480830097772444, + "learning_rate": 2.768002124373906e-06, + "loss": 0.5778, + "step": 4945 + }, + { + "epoch": 0.20409342246430634, + "grad_norm": 7.118886606750182, + 
"learning_rate": 2.7678950113518084e-06, + "loss": 0.5969, + "step": 4946 + }, + { + "epoch": 0.20413468680366428, + "grad_norm": 6.763493057165749, + "learning_rate": 2.767787875681935e-06, + "loss": 0.5733, + "step": 4947 + }, + { + "epoch": 0.2041759511430222, + "grad_norm": 3.4402228994107085, + "learning_rate": 2.7676807173661978e-06, + "loss": 0.557, + "step": 4948 + }, + { + "epoch": 0.20421721548238012, + "grad_norm": 3.6318449673785618, + "learning_rate": 2.767573536406512e-06, + "loss": 0.5531, + "step": 4949 + }, + { + "epoch": 0.20425847982173806, + "grad_norm": 5.995315878365572, + "learning_rate": 2.767466332804792e-06, + "loss": 0.541, + "step": 4950 + }, + { + "epoch": 0.20429974416109598, + "grad_norm": 4.36629720729987, + "learning_rate": 2.767359106562952e-06, + "loss": 0.5716, + "step": 4951 + }, + { + "epoch": 0.20434100850045392, + "grad_norm": 3.122091831608581, + "learning_rate": 2.7672518576829084e-06, + "loss": 0.6147, + "step": 4952 + }, + { + "epoch": 0.20438227283981183, + "grad_norm": 2.6766476675259434, + "learning_rate": 2.7671445861665765e-06, + "loss": 0.5684, + "step": 4953 + }, + { + "epoch": 0.20442353717916975, + "grad_norm": 5.247221220554046, + "learning_rate": 2.7670372920158722e-06, + "loss": 0.5819, + "step": 4954 + }, + { + "epoch": 0.2044648015185277, + "grad_norm": 4.304611737427481, + "learning_rate": 2.7669299752327123e-06, + "loss": 0.5607, + "step": 4955 + }, + { + "epoch": 0.2045060658578856, + "grad_norm": 4.278038892632013, + "learning_rate": 2.7668226358190135e-06, + "loss": 0.5599, + "step": 4956 + }, + { + "epoch": 0.20454733019724355, + "grad_norm": 2.1244104682426594, + "learning_rate": 2.7667152737766936e-06, + "loss": 0.5059, + "step": 4957 + }, + { + "epoch": 0.20458859453660147, + "grad_norm": 2.5385526862846612, + "learning_rate": 2.7666078891076698e-06, + "loss": 0.5192, + "step": 4958 + }, + { + "epoch": 0.2046298588759594, + "grad_norm": 6.471414287162093, + "learning_rate": 2.7665004818138607e-06, + "loss": 0.5897, + "step": 4959 + }, + { + "epoch": 0.20467112321531733, + "grad_norm": 4.142216527357658, + "learning_rate": 2.7663930518971847e-06, + "loss": 0.4442, + "step": 4960 + }, + { + "epoch": 0.20471238755467525, + "grad_norm": 2.8956880074136464, + "learning_rate": 2.766285599359561e-06, + "loss": 0.5291, + "step": 4961 + }, + { + "epoch": 0.2047536518940332, + "grad_norm": 3.1051631714607053, + "learning_rate": 2.766178124202908e-06, + "loss": 0.5404, + "step": 4962 + }, + { + "epoch": 0.2047949162333911, + "grad_norm": 4.1171206392002215, + "learning_rate": 2.7660706264291467e-06, + "loss": 0.5588, + "step": 4963 + }, + { + "epoch": 0.20483618057274902, + "grad_norm": 4.975759159792674, + "learning_rate": 2.7659631060401964e-06, + "loss": 0.5895, + "step": 4964 + }, + { + "epoch": 0.20487744491210697, + "grad_norm": 2.7415442318570147, + "learning_rate": 2.7658555630379788e-06, + "loss": 0.5211, + "step": 4965 + }, + { + "epoch": 0.20491870925146488, + "grad_norm": 3.120336679156868, + "learning_rate": 2.765747997424414e-06, + "loss": 0.5581, + "step": 4966 + }, + { + "epoch": 0.2049599735908228, + "grad_norm": 3.145307244525996, + "learning_rate": 2.7656404092014234e-06, + "loss": 0.5216, + "step": 4967 + }, + { + "epoch": 0.20500123793018074, + "grad_norm": 2.756115038129507, + "learning_rate": 2.7655327983709282e-06, + "loss": 0.5941, + "step": 4968 + }, + { + "epoch": 0.20504250226953866, + "grad_norm": 3.460897793019173, + "learning_rate": 2.7654251649348525e-06, + "loss": 0.5244, + "step": 4969 + }, + { + 
"epoch": 0.2050837666088966, + "grad_norm": 6.752557163823763, + "learning_rate": 2.765317508895117e-06, + "loss": 0.5905, + "step": 4970 + }, + { + "epoch": 0.20512503094825452, + "grad_norm": 2.3007934398979013, + "learning_rate": 2.7652098302536462e-06, + "loss": 0.5691, + "step": 4971 + }, + { + "epoch": 0.20516629528761243, + "grad_norm": 2.8052553092820647, + "learning_rate": 2.765102129012362e-06, + "loss": 0.5479, + "step": 4972 + }, + { + "epoch": 0.20520755962697038, + "grad_norm": 6.187084608916346, + "learning_rate": 2.7649944051731896e-06, + "loss": 0.5014, + "step": 4973 + }, + { + "epoch": 0.2052488239663283, + "grad_norm": 4.620311713219499, + "learning_rate": 2.764886658738052e-06, + "loss": 0.5818, + "step": 4974 + }, + { + "epoch": 0.20529008830568624, + "grad_norm": 2.8394830542404623, + "learning_rate": 2.764778889708875e-06, + "loss": 0.5753, + "step": 4975 + }, + { + "epoch": 0.20533135264504415, + "grad_norm": 8.02372212289158, + "learning_rate": 2.7646710980875823e-06, + "loss": 0.5501, + "step": 4976 + }, + { + "epoch": 0.20537261698440207, + "grad_norm": 3.9855050804631884, + "learning_rate": 2.7645632838761007e-06, + "loss": 0.5665, + "step": 4977 + }, + { + "epoch": 0.20541388132376, + "grad_norm": 12.69150744431329, + "learning_rate": 2.764455447076355e-06, + "loss": 0.5488, + "step": 4978 + }, + { + "epoch": 0.20545514566311793, + "grad_norm": 2.7055405643668027, + "learning_rate": 2.7643475876902716e-06, + "loss": 0.5803, + "step": 4979 + }, + { + "epoch": 0.20549641000247587, + "grad_norm": 4.810052260960752, + "learning_rate": 2.764239705719778e-06, + "loss": 0.5299, + "step": 4980 + }, + { + "epoch": 0.2055376743418338, + "grad_norm": 3.656713858841634, + "learning_rate": 2.7641318011668007e-06, + "loss": 0.5372, + "step": 4981 + }, + { + "epoch": 0.2055789386811917, + "grad_norm": 4.586330787239707, + "learning_rate": 2.7640238740332665e-06, + "loss": 0.5543, + "step": 4982 + }, + { + "epoch": 0.20562020302054965, + "grad_norm": 3.642014096108813, + "learning_rate": 2.763915924321104e-06, + "loss": 0.5136, + "step": 4983 + }, + { + "epoch": 0.20566146735990756, + "grad_norm": 2.1725184604155063, + "learning_rate": 2.7638079520322416e-06, + "loss": 0.5291, + "step": 4984 + }, + { + "epoch": 0.2057027316992655, + "grad_norm": 2.358428597546315, + "learning_rate": 2.763699957168607e-06, + "loss": 0.544, + "step": 4985 + }, + { + "epoch": 0.20574399603862342, + "grad_norm": 2.416424895665551, + "learning_rate": 2.76359193973213e-06, + "loss": 0.5385, + "step": 4986 + }, + { + "epoch": 0.20578526037798134, + "grad_norm": 4.796764395200212, + "learning_rate": 2.7634838997247405e-06, + "loss": 0.5038, + "step": 4987 + }, + { + "epoch": 0.20582652471733928, + "grad_norm": 3.0964759530296755, + "learning_rate": 2.763375837148367e-06, + "loss": 0.5921, + "step": 4988 + }, + { + "epoch": 0.2058677890566972, + "grad_norm": 10.150504919074683, + "learning_rate": 2.7632677520049414e-06, + "loss": 0.5655, + "step": 4989 + }, + { + "epoch": 0.20590905339605514, + "grad_norm": 3.210412968689958, + "learning_rate": 2.763159644296393e-06, + "loss": 0.5044, + "step": 4990 + }, + { + "epoch": 0.20595031773541306, + "grad_norm": 3.7852047269643188, + "learning_rate": 2.7630515140246537e-06, + "loss": 0.6105, + "step": 4991 + }, + { + "epoch": 0.20599158207477097, + "grad_norm": 3.0110067881362426, + "learning_rate": 2.7629433611916542e-06, + "loss": 0.5591, + "step": 4992 + }, + { + "epoch": 0.20603284641412892, + "grad_norm": 2.8922376887479904, + "learning_rate": 
2.7628351857993273e-06, + "loss": 0.5751, + "step": 4993 + }, + { + "epoch": 0.20607411075348683, + "grad_norm": 2.528730175129552, + "learning_rate": 2.7627269878496045e-06, + "loss": 0.5484, + "step": 4994 + }, + { + "epoch": 0.20611537509284478, + "grad_norm": 5.598288159575058, + "learning_rate": 2.7626187673444193e-06, + "loss": 0.4962, + "step": 4995 + }, + { + "epoch": 0.2061566394322027, + "grad_norm": 2.982004014870273, + "learning_rate": 2.762510524285704e-06, + "loss": 0.5358, + "step": 4996 + }, + { + "epoch": 0.2061979037715606, + "grad_norm": 4.8711561077331185, + "learning_rate": 2.7624022586753927e-06, + "loss": 0.5468, + "step": 4997 + }, + { + "epoch": 0.20623916811091855, + "grad_norm": 2.5963223187606763, + "learning_rate": 2.762293970515419e-06, + "loss": 0.5915, + "step": 4998 + }, + { + "epoch": 0.20628043245027647, + "grad_norm": 2.9711168028400783, + "learning_rate": 2.762185659807717e-06, + "loss": 0.5729, + "step": 4999 + }, + { + "epoch": 0.20632169678963438, + "grad_norm": 2.2793098312740394, + "learning_rate": 2.7620773265542216e-06, + "loss": 0.5458, + "step": 5000 + }, + { + "epoch": 0.20636296112899233, + "grad_norm": 3.337133473441427, + "learning_rate": 2.761968970756868e-06, + "loss": 0.5193, + "step": 5001 + }, + { + "epoch": 0.20640422546835024, + "grad_norm": 3.382745286014523, + "learning_rate": 2.761860592417592e-06, + "loss": 0.58, + "step": 5002 + }, + { + "epoch": 0.2064454898077082, + "grad_norm": 8.637433423284724, + "learning_rate": 2.7617521915383283e-06, + "loss": 0.5524, + "step": 5003 + }, + { + "epoch": 0.2064867541470661, + "grad_norm": 6.39181229098363, + "learning_rate": 2.7616437681210147e-06, + "loss": 0.5788, + "step": 5004 + }, + { + "epoch": 0.20652801848642402, + "grad_norm": 2.641280178376005, + "learning_rate": 2.7615353221675865e-06, + "loss": 0.5802, + "step": 5005 + }, + { + "epoch": 0.20656928282578196, + "grad_norm": 2.5992500372153367, + "learning_rate": 2.7614268536799822e-06, + "loss": 0.5767, + "step": 5006 + }, + { + "epoch": 0.20661054716513988, + "grad_norm": 3.3616842916783414, + "learning_rate": 2.7613183626601386e-06, + "loss": 0.5542, + "step": 5007 + }, + { + "epoch": 0.20665181150449782, + "grad_norm": 2.4536992002612887, + "learning_rate": 2.7612098491099935e-06, + "loss": 0.5197, + "step": 5008 + }, + { + "epoch": 0.20669307584385574, + "grad_norm": 3.0077978274533432, + "learning_rate": 2.761101313031486e-06, + "loss": 0.5982, + "step": 5009 + }, + { + "epoch": 0.20673434018321366, + "grad_norm": 2.1113716308572683, + "learning_rate": 2.7609927544265536e-06, + "loss": 0.5351, + "step": 5010 + }, + { + "epoch": 0.2067756045225716, + "grad_norm": 2.678580294832408, + "learning_rate": 2.760884173297136e-06, + "loss": 0.5638, + "step": 5011 + }, + { + "epoch": 0.20681686886192951, + "grad_norm": 4.180150317343912, + "learning_rate": 2.7607755696451732e-06, + "loss": 0.5264, + "step": 5012 + }, + { + "epoch": 0.20685813320128746, + "grad_norm": 2.981836146210647, + "learning_rate": 2.760666943472605e-06, + "loss": 0.5848, + "step": 5013 + }, + { + "epoch": 0.20689939754064537, + "grad_norm": 2.489903118836915, + "learning_rate": 2.7605582947813714e-06, + "loss": 0.5642, + "step": 5014 + }, + { + "epoch": 0.2069406618800033, + "grad_norm": 8.595071869826409, + "learning_rate": 2.760449623573413e-06, + "loss": 0.5644, + "step": 5015 + }, + { + "epoch": 0.20698192621936123, + "grad_norm": 3.6397063122601936, + "learning_rate": 2.760340929850671e-06, + "loss": 0.5872, + "step": 5016 + }, + { + "epoch": 
0.20702319055871915, + "grad_norm": 6.953730416362713, + "learning_rate": 2.760232213615087e-06, + "loss": 0.571, + "step": 5017 + }, + { + "epoch": 0.2070644548980771, + "grad_norm": 2.5816255622392195, + "learning_rate": 2.7601234748686034e-06, + "loss": 0.5532, + "step": 5018 + }, + { + "epoch": 0.207105719237435, + "grad_norm": 9.680229555859512, + "learning_rate": 2.7600147136131622e-06, + "loss": 0.5301, + "step": 5019 + }, + { + "epoch": 0.20714698357679293, + "grad_norm": 4.431534871875346, + "learning_rate": 2.7599059298507064e-06, + "loss": 0.5334, + "step": 5020 + }, + { + "epoch": 0.20718824791615087, + "grad_norm": 3.0031754941438145, + "learning_rate": 2.7597971235831784e-06, + "loss": 0.5229, + "step": 5021 + }, + { + "epoch": 0.20722951225550879, + "grad_norm": 3.558452372186266, + "learning_rate": 2.7596882948125224e-06, + "loss": 0.5558, + "step": 5022 + }, + { + "epoch": 0.20727077659486673, + "grad_norm": 2.9388729979583816, + "learning_rate": 2.759579443540682e-06, + "loss": 0.5957, + "step": 5023 + }, + { + "epoch": 0.20731204093422465, + "grad_norm": 2.5558384737070474, + "learning_rate": 2.7594705697696017e-06, + "loss": 0.5436, + "step": 5024 + }, + { + "epoch": 0.20735330527358256, + "grad_norm": 3.5205710962663037, + "learning_rate": 2.759361673501227e-06, + "loss": 0.5399, + "step": 5025 + }, + { + "epoch": 0.2073945696129405, + "grad_norm": 3.202611723466593, + "learning_rate": 2.7592527547375018e-06, + "loss": 0.5421, + "step": 5026 + }, + { + "epoch": 0.20743583395229842, + "grad_norm": 5.28055151812344, + "learning_rate": 2.7591438134803723e-06, + "loss": 0.5912, + "step": 5027 + }, + { + "epoch": 0.20747709829165636, + "grad_norm": 4.7040510059176555, + "learning_rate": 2.7590348497317842e-06, + "loss": 0.5904, + "step": 5028 + }, + { + "epoch": 0.20751836263101428, + "grad_norm": 2.2804020250429233, + "learning_rate": 2.7589258634936845e-06, + "loss": 0.5717, + "step": 5029 + }, + { + "epoch": 0.2075596269703722, + "grad_norm": 2.4558847578060483, + "learning_rate": 2.7588168547680197e-06, + "loss": 0.5987, + "step": 5030 + }, + { + "epoch": 0.20760089130973014, + "grad_norm": 3.360970931895975, + "learning_rate": 2.7587078235567366e-06, + "loss": 0.5831, + "step": 5031 + }, + { + "epoch": 0.20764215564908806, + "grad_norm": 6.960568434632821, + "learning_rate": 2.7585987698617826e-06, + "loss": 0.662, + "step": 5032 + }, + { + "epoch": 0.20768341998844597, + "grad_norm": 3.83964707412479, + "learning_rate": 2.7584896936851062e-06, + "loss": 0.5658, + "step": 5033 + }, + { + "epoch": 0.20772468432780392, + "grad_norm": 4.301566002068407, + "learning_rate": 2.7583805950286557e-06, + "loss": 0.5385, + "step": 5034 + }, + { + "epoch": 0.20776594866716183, + "grad_norm": 3.863723598574003, + "learning_rate": 2.7582714738943796e-06, + "loss": 0.5822, + "step": 5035 + }, + { + "epoch": 0.20780721300651978, + "grad_norm": 3.0698363389165224, + "learning_rate": 2.7581623302842275e-06, + "loss": 0.5567, + "step": 5036 + }, + { + "epoch": 0.2078484773458777, + "grad_norm": 4.578908646188571, + "learning_rate": 2.7580531642001486e-06, + "loss": 0.5796, + "step": 5037 + }, + { + "epoch": 0.2078897416852356, + "grad_norm": 7.0702421297425175, + "learning_rate": 2.757943975644093e-06, + "loss": 0.6078, + "step": 5038 + }, + { + "epoch": 0.20793100602459355, + "grad_norm": 2.202529406240654, + "learning_rate": 2.757834764618011e-06, + "loss": 0.5006, + "step": 5039 + }, + { + "epoch": 0.20797227036395147, + "grad_norm": 5.555149027227045, + "learning_rate": 
2.757725531123854e-06, + "loss": 0.6232, + "step": 5040 + }, + { + "epoch": 0.2080135347033094, + "grad_norm": 4.935728901007652, + "learning_rate": 2.757616275163572e-06, + "loss": 0.5215, + "step": 5041 + }, + { + "epoch": 0.20805479904266733, + "grad_norm": 2.646313421958177, + "learning_rate": 2.7575069967391174e-06, + "loss": 0.6066, + "step": 5042 + }, + { + "epoch": 0.20809606338202524, + "grad_norm": 6.057035468952556, + "learning_rate": 2.757397695852443e-06, + "loss": 0.6173, + "step": 5043 + }, + { + "epoch": 0.2081373277213832, + "grad_norm": 3.7837587324036748, + "learning_rate": 2.757288372505499e-06, + "loss": 0.5253, + "step": 5044 + }, + { + "epoch": 0.2081785920607411, + "grad_norm": 4.219302947234677, + "learning_rate": 2.75717902670024e-06, + "loss": 0.6002, + "step": 5045 + }, + { + "epoch": 0.20821985640009905, + "grad_norm": 2.863669991693859, + "learning_rate": 2.7570696584386183e-06, + "loss": 0.532, + "step": 5046 + }, + { + "epoch": 0.20826112073945696, + "grad_norm": 10.02861302057395, + "learning_rate": 2.756960267722588e-06, + "loss": 0.5344, + "step": 5047 + }, + { + "epoch": 0.20830238507881488, + "grad_norm": 2.458865300024356, + "learning_rate": 2.7568508545541027e-06, + "loss": 0.4855, + "step": 5048 + }, + { + "epoch": 0.20834364941817282, + "grad_norm": 2.682213631072743, + "learning_rate": 2.756741418935117e-06, + "loss": 0.567, + "step": 5049 + }, + { + "epoch": 0.20838491375753074, + "grad_norm": 4.793862964816363, + "learning_rate": 2.756631960867586e-06, + "loss": 0.5773, + "step": 5050 + }, + { + "epoch": 0.20842617809688868, + "grad_norm": 4.17607402749397, + "learning_rate": 2.7565224803534643e-06, + "loss": 0.572, + "step": 5051 + }, + { + "epoch": 0.2084674424362466, + "grad_norm": 2.8477988512523074, + "learning_rate": 2.7564129773947076e-06, + "loss": 0.5582, + "step": 5052 + }, + { + "epoch": 0.2085087067756045, + "grad_norm": 3.8840961029398673, + "learning_rate": 2.7563034519932724e-06, + "loss": 0.5928, + "step": 5053 + }, + { + "epoch": 0.20854997111496246, + "grad_norm": 3.956320632649216, + "learning_rate": 2.756193904151115e-06, + "loss": 0.5152, + "step": 5054 + }, + { + "epoch": 0.20859123545432037, + "grad_norm": 3.2013538173070915, + "learning_rate": 2.7560843338701917e-06, + "loss": 0.5397, + "step": 5055 + }, + { + "epoch": 0.20863249979367832, + "grad_norm": 10.312276139642433, + "learning_rate": 2.7559747411524593e-06, + "loss": 0.5485, + "step": 5056 + }, + { + "epoch": 0.20867376413303623, + "grad_norm": 2.9076356653133235, + "learning_rate": 2.7558651259998764e-06, + "loss": 0.543, + "step": 5057 + }, + { + "epoch": 0.20871502847239415, + "grad_norm": 2.477132359289187, + "learning_rate": 2.755755488414401e-06, + "loss": 0.4545, + "step": 5058 + }, + { + "epoch": 0.2087562928117521, + "grad_norm": 10.081713126921441, + "learning_rate": 2.7556458283979913e-06, + "loss": 0.5364, + "step": 5059 + }, + { + "epoch": 0.20879755715111, + "grad_norm": 4.706520047617748, + "learning_rate": 2.7555361459526053e-06, + "loss": 0.5292, + "step": 5060 + }, + { + "epoch": 0.20883882149046792, + "grad_norm": 3.3636359630075616, + "learning_rate": 2.7554264410802034e-06, + "loss": 0.6032, + "step": 5061 + }, + { + "epoch": 0.20888008582982587, + "grad_norm": 2.705597303188522, + "learning_rate": 2.7553167137827445e-06, + "loss": 0.5599, + "step": 5062 + }, + { + "epoch": 0.20892135016918378, + "grad_norm": 3.35536119963268, + "learning_rate": 2.7552069640621885e-06, + "loss": 0.5096, + "step": 5063 + }, + { + "epoch": 0.20896261450854173, + 
"grad_norm": 2.6871303279294017, + "learning_rate": 2.7550971919204967e-06, + "loss": 0.5551, + "step": 5064 + }, + { + "epoch": 0.20900387884789964, + "grad_norm": 3.448505137473753, + "learning_rate": 2.7549873973596292e-06, + "loss": 0.6017, + "step": 5065 + }, + { + "epoch": 0.20904514318725756, + "grad_norm": 7.908120646909983, + "learning_rate": 2.7548775803815465e-06, + "loss": 0.6125, + "step": 5066 + }, + { + "epoch": 0.2090864075266155, + "grad_norm": 4.473726248425263, + "learning_rate": 2.754767740988212e-06, + "loss": 0.6357, + "step": 5067 + }, + { + "epoch": 0.20912767186597342, + "grad_norm": 3.0506382103214382, + "learning_rate": 2.754657879181586e-06, + "loss": 0.5676, + "step": 5068 + }, + { + "epoch": 0.20916893620533136, + "grad_norm": 2.9573719808233956, + "learning_rate": 2.754547994963632e-06, + "loss": 0.5622, + "step": 5069 + }, + { + "epoch": 0.20921020054468928, + "grad_norm": 2.863483653208933, + "learning_rate": 2.7544380883363122e-06, + "loss": 0.6247, + "step": 5070 + }, + { + "epoch": 0.2092514648840472, + "grad_norm": 4.47648171600428, + "learning_rate": 2.7543281593015897e-06, + "loss": 0.5973, + "step": 5071 + }, + { + "epoch": 0.20929272922340514, + "grad_norm": 2.5006472624561393, + "learning_rate": 2.754218207861429e-06, + "loss": 0.5148, + "step": 5072 + }, + { + "epoch": 0.20933399356276305, + "grad_norm": 6.631770887190095, + "learning_rate": 2.7541082340177932e-06, + "loss": 0.537, + "step": 5073 + }, + { + "epoch": 0.209375257902121, + "grad_norm": 3.4017860049523074, + "learning_rate": 2.753998237772647e-06, + "loss": 0.5226, + "step": 5074 + }, + { + "epoch": 0.20941652224147891, + "grad_norm": 2.1250882208298436, + "learning_rate": 2.7538882191279555e-06, + "loss": 0.5438, + "step": 5075 + }, + { + "epoch": 0.20945778658083683, + "grad_norm": 3.076287809939956, + "learning_rate": 2.7537781780856838e-06, + "loss": 0.585, + "step": 5076 + }, + { + "epoch": 0.20949905092019477, + "grad_norm": 8.239457491487334, + "learning_rate": 2.7536681146477966e-06, + "loss": 0.5891, + "step": 5077 + }, + { + "epoch": 0.2095403152595527, + "grad_norm": 3.6877059090831485, + "learning_rate": 2.7535580288162615e-06, + "loss": 0.5375, + "step": 5078 + }, + { + "epoch": 0.20958157959891063, + "grad_norm": 4.131193450008186, + "learning_rate": 2.7534479205930437e-06, + "loss": 0.5506, + "step": 5079 + }, + { + "epoch": 0.20962284393826855, + "grad_norm": 6.5707522286198525, + "learning_rate": 2.7533377899801104e-06, + "loss": 0.5906, + "step": 5080 + }, + { + "epoch": 0.20966410827762647, + "grad_norm": 2.6788177881210076, + "learning_rate": 2.753227636979429e-06, + "loss": 0.5123, + "step": 5081 + }, + { + "epoch": 0.2097053726169844, + "grad_norm": 3.0362020189371135, + "learning_rate": 2.7531174615929668e-06, + "loss": 0.5294, + "step": 5082 + }, + { + "epoch": 0.20974663695634232, + "grad_norm": 5.035249696500459, + "learning_rate": 2.7530072638226917e-06, + "loss": 0.5492, + "step": 5083 + }, + { + "epoch": 0.20978790129570027, + "grad_norm": 2.789857709251847, + "learning_rate": 2.7528970436705726e-06, + "loss": 0.5097, + "step": 5084 + }, + { + "epoch": 0.20982916563505818, + "grad_norm": 2.424472338892593, + "learning_rate": 2.752786801138578e-06, + "loss": 0.542, + "step": 5085 + }, + { + "epoch": 0.2098704299744161, + "grad_norm": 2.8900444771639924, + "learning_rate": 2.752676536228677e-06, + "loss": 0.5121, + "step": 5086 + }, + { + "epoch": 0.20991169431377404, + "grad_norm": 2.4577338645949256, + "learning_rate": 2.7525662489428392e-06, + "loss": 
0.5269, + "step": 5087 + }, + { + "epoch": 0.20995295865313196, + "grad_norm": 5.667144346739044, + "learning_rate": 2.752455939283035e-06, + "loss": 0.6058, + "step": 5088 + }, + { + "epoch": 0.2099942229924899, + "grad_norm": 2.836444368034586, + "learning_rate": 2.7523456072512345e-06, + "loss": 0.5548, + "step": 5089 + }, + { + "epoch": 0.21003548733184782, + "grad_norm": 4.816234602323747, + "learning_rate": 2.7522352528494085e-06, + "loss": 0.5692, + "step": 5090 + }, + { + "epoch": 0.21007675167120574, + "grad_norm": 3.3861485021054722, + "learning_rate": 2.7521248760795287e-06, + "loss": 0.6073, + "step": 5091 + }, + { + "epoch": 0.21011801601056368, + "grad_norm": 8.020445990221312, + "learning_rate": 2.752014476943566e-06, + "loss": 0.5468, + "step": 5092 + }, + { + "epoch": 0.2101592803499216, + "grad_norm": 2.447824986959643, + "learning_rate": 2.7519040554434923e-06, + "loss": 0.5484, + "step": 5093 + }, + { + "epoch": 0.2102005446892795, + "grad_norm": 2.696717077701865, + "learning_rate": 2.751793611581281e-06, + "loss": 0.612, + "step": 5094 + }, + { + "epoch": 0.21024180902863746, + "grad_norm": 2.8606366133325256, + "learning_rate": 2.751683145358904e-06, + "loss": 0.5156, + "step": 5095 + }, + { + "epoch": 0.21028307336799537, + "grad_norm": 2.3081984423281976, + "learning_rate": 2.7515726567783347e-06, + "loss": 0.5513, + "step": 5096 + }, + { + "epoch": 0.21032433770735331, + "grad_norm": 2.826078214416483, + "learning_rate": 2.751462145841547e-06, + "loss": 0.5614, + "step": 5097 + }, + { + "epoch": 0.21036560204671123, + "grad_norm": 2.3913188749445498, + "learning_rate": 2.7513516125505145e-06, + "loss": 0.5937, + "step": 5098 + }, + { + "epoch": 0.21040686638606915, + "grad_norm": 3.6735027190868936, + "learning_rate": 2.751241056907212e-06, + "loss": 0.5845, + "step": 5099 + }, + { + "epoch": 0.2104481307254271, + "grad_norm": 6.208867606689839, + "learning_rate": 2.751130478913614e-06, + "loss": 0.5658, + "step": 5100 + }, + { + "epoch": 0.210489395064785, + "grad_norm": 7.712625901569511, + "learning_rate": 2.7510198785716955e-06, + "loss": 0.5372, + "step": 5101 + }, + { + "epoch": 0.21053065940414295, + "grad_norm": 2.7116659246114754, + "learning_rate": 2.750909255883433e-06, + "loss": 0.6009, + "step": 5102 + }, + { + "epoch": 0.21057192374350087, + "grad_norm": 2.1103447471733454, + "learning_rate": 2.7507986108508016e-06, + "loss": 0.5479, + "step": 5103 + }, + { + "epoch": 0.21061318808285878, + "grad_norm": 1.8722022676137284, + "learning_rate": 2.7506879434757777e-06, + "loss": 0.5, + "step": 5104 + }, + { + "epoch": 0.21065445242221673, + "grad_norm": 4.527772980716128, + "learning_rate": 2.750577253760339e-06, + "loss": 0.5303, + "step": 5105 + }, + { + "epoch": 0.21069571676157464, + "grad_norm": 15.256235534748262, + "learning_rate": 2.7504665417064617e-06, + "loss": 0.6449, + "step": 5106 + }, + { + "epoch": 0.21073698110093259, + "grad_norm": 3.395285791351621, + "learning_rate": 2.750355807316124e-06, + "loss": 0.5196, + "step": 5107 + }, + { + "epoch": 0.2107782454402905, + "grad_norm": 2.7216845531325498, + "learning_rate": 2.750245050591303e-06, + "loss": 0.571, + "step": 5108 + }, + { + "epoch": 0.21081950977964842, + "grad_norm": 4.967503487777812, + "learning_rate": 2.7501342715339785e-06, + "loss": 0.5711, + "step": 5109 + }, + { + "epoch": 0.21086077411900636, + "grad_norm": 2.8481096526756, + "learning_rate": 2.750023470146128e-06, + "loss": 0.5842, + "step": 5110 + }, + { + "epoch": 0.21090203845836428, + "grad_norm": 4.413071653831015, 
+ "learning_rate": 2.7499126464297316e-06, + "loss": 0.5757, + "step": 5111 + }, + { + "epoch": 0.21094330279772222, + "grad_norm": 3.360389529625229, + "learning_rate": 2.7498018003867685e-06, + "loss": 0.5618, + "step": 5112 + }, + { + "epoch": 0.21098456713708014, + "grad_norm": 13.833585317339937, + "learning_rate": 2.7496909320192186e-06, + "loss": 0.5853, + "step": 5113 + }, + { + "epoch": 0.21102583147643805, + "grad_norm": 3.9060337332688144, + "learning_rate": 2.749580041329063e-06, + "loss": 0.6346, + "step": 5114 + }, + { + "epoch": 0.211067095815796, + "grad_norm": 3.6764818370143586, + "learning_rate": 2.7494691283182814e-06, + "loss": 0.558, + "step": 5115 + }, + { + "epoch": 0.2111083601551539, + "grad_norm": 3.275464005753784, + "learning_rate": 2.7493581929888553e-06, + "loss": 0.5549, + "step": 5116 + }, + { + "epoch": 0.21114962449451186, + "grad_norm": 3.127100677324351, + "learning_rate": 2.7492472353427665e-06, + "loss": 0.5648, + "step": 5117 + }, + { + "epoch": 0.21119088883386977, + "grad_norm": 2.8583652111799216, + "learning_rate": 2.749136255381997e-06, + "loss": 0.5731, + "step": 5118 + }, + { + "epoch": 0.2112321531732277, + "grad_norm": 5.783058809385066, + "learning_rate": 2.7490252531085294e-06, + "loss": 0.5713, + "step": 5119 + }, + { + "epoch": 0.21127341751258563, + "grad_norm": 2.5895019988966905, + "learning_rate": 2.748914228524346e-06, + "loss": 0.5547, + "step": 5120 + }, + { + "epoch": 0.21131468185194355, + "grad_norm": 4.148935860856974, + "learning_rate": 2.74880318163143e-06, + "loss": 0.5803, + "step": 5121 + }, + { + "epoch": 0.21135594619130146, + "grad_norm": 3.5014525086993427, + "learning_rate": 2.748692112431765e-06, + "loss": 0.5457, + "step": 5122 + }, + { + "epoch": 0.2113972105306594, + "grad_norm": 31.906439921434988, + "learning_rate": 2.7485810209273355e-06, + "loss": 0.5856, + "step": 5123 + }, + { + "epoch": 0.21143847487001732, + "grad_norm": 6.529913989515552, + "learning_rate": 2.7484699071201256e-06, + "loss": 0.5211, + "step": 5124 + }, + { + "epoch": 0.21147973920937527, + "grad_norm": 2.5958306427680977, + "learning_rate": 2.74835877101212e-06, + "loss": 0.5572, + "step": 5125 + }, + { + "epoch": 0.21152100354873318, + "grad_norm": 5.930283528354456, + "learning_rate": 2.7482476126053034e-06, + "loss": 0.5407, + "step": 5126 + }, + { + "epoch": 0.2115622678880911, + "grad_norm": 3.2860692839899754, + "learning_rate": 2.748136431901662e-06, + "loss": 0.5672, + "step": 5127 + }, + { + "epoch": 0.21160353222744904, + "grad_norm": 8.658258111363645, + "learning_rate": 2.7480252289031817e-06, + "loss": 0.5823, + "step": 5128 + }, + { + "epoch": 0.21164479656680696, + "grad_norm": 3.287919182538829, + "learning_rate": 2.747914003611849e-06, + "loss": 0.5069, + "step": 5129 + }, + { + "epoch": 0.2116860609061649, + "grad_norm": 2.061873336546123, + "learning_rate": 2.7478027560296496e-06, + "loss": 0.6286, + "step": 5130 + }, + { + "epoch": 0.21172732524552282, + "grad_norm": 3.499328760372559, + "learning_rate": 2.747691486158572e-06, + "loss": 0.5221, + "step": 5131 + }, + { + "epoch": 0.21176858958488073, + "grad_norm": 11.869870199418633, + "learning_rate": 2.747580194000603e-06, + "loss": 0.5151, + "step": 5132 + }, + { + "epoch": 0.21180985392423868, + "grad_norm": 5.829500144491197, + "learning_rate": 2.7474688795577308e-06, + "loss": 0.5254, + "step": 5133 + }, + { + "epoch": 0.2118511182635966, + "grad_norm": 5.15906679511779, + "learning_rate": 2.747357542831944e-06, + "loss": 0.5173, + "step": 5134 + }, + { + 
"epoch": 0.21189238260295454, + "grad_norm": 3.232926211331119, + "learning_rate": 2.747246183825231e-06, + "loss": 0.5043, + "step": 5135 + }, + { + "epoch": 0.21193364694231245, + "grad_norm": 29.372089998773202, + "learning_rate": 2.7471348025395805e-06, + "loss": 0.527, + "step": 5136 + }, + { + "epoch": 0.21197491128167037, + "grad_norm": 4.003165964390408, + "learning_rate": 2.747023398976983e-06, + "loss": 0.5184, + "step": 5137 + }, + { + "epoch": 0.2120161756210283, + "grad_norm": 3.3255424416325288, + "learning_rate": 2.746911973139428e-06, + "loss": 0.5327, + "step": 5138 + }, + { + "epoch": 0.21205743996038623, + "grad_norm": 3.7933332132848645, + "learning_rate": 2.7468005250289056e-06, + "loss": 0.5778, + "step": 5139 + }, + { + "epoch": 0.21209870429974417, + "grad_norm": 3.9284996008059525, + "learning_rate": 2.746689054647407e-06, + "loss": 0.6453, + "step": 5140 + }, + { + "epoch": 0.2121399686391021, + "grad_norm": 2.4594005153211684, + "learning_rate": 2.7465775619969234e-06, + "loss": 0.5889, + "step": 5141 + }, + { + "epoch": 0.21218123297846, + "grad_norm": 2.6298798397466667, + "learning_rate": 2.7464660470794454e-06, + "loss": 0.5346, + "step": 5142 + }, + { + "epoch": 0.21222249731781795, + "grad_norm": 4.3465405280985285, + "learning_rate": 2.746354509896966e-06, + "loss": 0.5718, + "step": 5143 + }, + { + "epoch": 0.21226376165717586, + "grad_norm": 2.3851993819200703, + "learning_rate": 2.7462429504514777e-06, + "loss": 0.5503, + "step": 5144 + }, + { + "epoch": 0.2123050259965338, + "grad_norm": 3.3269040787658493, + "learning_rate": 2.7461313687449724e-06, + "loss": 0.5537, + "step": 5145 + }, + { + "epoch": 0.21234629033589172, + "grad_norm": 3.1015142522403796, + "learning_rate": 2.7460197647794433e-06, + "loss": 0.5554, + "step": 5146 + }, + { + "epoch": 0.21238755467524964, + "grad_norm": 3.0783141026018574, + "learning_rate": 2.7459081385568843e-06, + "loss": 0.5499, + "step": 5147 + }, + { + "epoch": 0.21242881901460758, + "grad_norm": 6.714971456886624, + "learning_rate": 2.745796490079289e-06, + "loss": 0.5599, + "step": 5148 + }, + { + "epoch": 0.2124700833539655, + "grad_norm": 2.490539264240737, + "learning_rate": 2.7456848193486514e-06, + "loss": 0.5039, + "step": 5149 + }, + { + "epoch": 0.21251134769332344, + "grad_norm": 6.270357398881115, + "learning_rate": 2.745573126366967e-06, + "loss": 0.5012, + "step": 5150 + }, + { + "epoch": 0.21255261203268136, + "grad_norm": 2.2782990407056913, + "learning_rate": 2.7454614111362314e-06, + "loss": 0.546, + "step": 5151 + }, + { + "epoch": 0.21259387637203928, + "grad_norm": 3.4592759603950034, + "learning_rate": 2.745349673658439e-06, + "loss": 0.5239, + "step": 5152 + }, + { + "epoch": 0.21263514071139722, + "grad_norm": 4.549779354073137, + "learning_rate": 2.7452379139355854e-06, + "loss": 0.5715, + "step": 5153 + }, + { + "epoch": 0.21267640505075514, + "grad_norm": 3.734138425654413, + "learning_rate": 2.745126131969668e-06, + "loss": 0.5694, + "step": 5154 + }, + { + "epoch": 0.21271766939011305, + "grad_norm": 3.0252705042228123, + "learning_rate": 2.7450143277626832e-06, + "loss": 0.5991, + "step": 5155 + }, + { + "epoch": 0.212758933729471, + "grad_norm": 3.300029385998307, + "learning_rate": 2.7449025013166275e-06, + "loss": 0.5214, + "step": 5156 + }, + { + "epoch": 0.2128001980688289, + "grad_norm": 5.906561245875914, + "learning_rate": 2.744790652633499e-06, + "loss": 0.5924, + "step": 5157 + }, + { + "epoch": 0.21284146240818685, + "grad_norm": 2.649817420671265, + "learning_rate": 
2.7446787817152958e-06, + "loss": 0.5573, + "step": 5158 + }, + { + "epoch": 0.21288272674754477, + "grad_norm": 2.712352092779808, + "learning_rate": 2.7445668885640156e-06, + "loss": 0.5891, + "step": 5159 + }, + { + "epoch": 0.2129239910869027, + "grad_norm": 3.282248996426434, + "learning_rate": 2.744454973181657e-06, + "loss": 0.5893, + "step": 5160 + }, + { + "epoch": 0.21296525542626063, + "grad_norm": 2.9606861829937556, + "learning_rate": 2.74434303557022e-06, + "loss": 0.5349, + "step": 5161 + }, + { + "epoch": 0.21300651976561855, + "grad_norm": 3.5672161864047087, + "learning_rate": 2.7442310757317036e-06, + "loss": 0.5358, + "step": 5162 + }, + { + "epoch": 0.2130477841049765, + "grad_norm": 4.823405646386176, + "learning_rate": 2.744119093668107e-06, + "loss": 0.567, + "step": 5163 + }, + { + "epoch": 0.2130890484443344, + "grad_norm": 2.5245253776781174, + "learning_rate": 2.7440070893814313e-06, + "loss": 0.5462, + "step": 5164 + }, + { + "epoch": 0.21313031278369232, + "grad_norm": 3.0031509522249484, + "learning_rate": 2.743895062873677e-06, + "loss": 0.5849, + "step": 5165 + }, + { + "epoch": 0.21317157712305027, + "grad_norm": 4.889398506156259, + "learning_rate": 2.7437830141468453e-06, + "loss": 0.5636, + "step": 5166 + }, + { + "epoch": 0.21321284146240818, + "grad_norm": 2.411802787157853, + "learning_rate": 2.743670943202937e-06, + "loss": 0.5247, + "step": 5167 + }, + { + "epoch": 0.21325410580176613, + "grad_norm": 5.206724177493022, + "learning_rate": 2.743558850043955e-06, + "loss": 0.5674, + "step": 5168 + }, + { + "epoch": 0.21329537014112404, + "grad_norm": 3.1020109097657698, + "learning_rate": 2.7434467346719006e-06, + "loss": 0.5671, + "step": 5169 + }, + { + "epoch": 0.21333663448048196, + "grad_norm": 4.172999829995012, + "learning_rate": 2.7433345970887776e-06, + "loss": 0.5459, + "step": 5170 + }, + { + "epoch": 0.2133778988198399, + "grad_norm": 2.664946829216862, + "learning_rate": 2.7432224372965875e-06, + "loss": 0.5213, + "step": 5171 + }, + { + "epoch": 0.21341916315919782, + "grad_norm": 3.3301801331122656, + "learning_rate": 2.743110255297335e-06, + "loss": 0.5507, + "step": 5172 + }, + { + "epoch": 0.21346042749855576, + "grad_norm": 2.523854662754821, + "learning_rate": 2.7429980510930238e-06, + "loss": 0.549, + "step": 5173 + }, + { + "epoch": 0.21350169183791368, + "grad_norm": 2.6253803835711964, + "learning_rate": 2.7428858246856578e-06, + "loss": 0.5348, + "step": 5174 + }, + { + "epoch": 0.2135429561772716, + "grad_norm": 2.9147657734684365, + "learning_rate": 2.7427735760772415e-06, + "loss": 0.5131, + "step": 5175 + }, + { + "epoch": 0.21358422051662954, + "grad_norm": 3.7834203412464, + "learning_rate": 2.7426613052697806e-06, + "loss": 0.5904, + "step": 5176 + }, + { + "epoch": 0.21362548485598745, + "grad_norm": 4.217917719922727, + "learning_rate": 2.7425490122652796e-06, + "loss": 0.489, + "step": 5177 + }, + { + "epoch": 0.2136667491953454, + "grad_norm": 3.1905981979960516, + "learning_rate": 2.742436697065745e-06, + "loss": 0.5603, + "step": 5178 + }, + { + "epoch": 0.2137080135347033, + "grad_norm": 3.411947204709647, + "learning_rate": 2.742324359673183e-06, + "loss": 0.534, + "step": 5179 + }, + { + "epoch": 0.21374927787406123, + "grad_norm": 3.121940052083727, + "learning_rate": 2.7422120000896e-06, + "loss": 0.5521, + "step": 5180 + }, + { + "epoch": 0.21379054221341917, + "grad_norm": 4.282797036117838, + "learning_rate": 2.7420996183170034e-06, + "loss": 0.6271, + "step": 5181 + }, + { + "epoch": 0.2138318065527771, + 
"grad_norm": 4.546317460961014, + "learning_rate": 2.7419872143574e-06, + "loss": 0.508, + "step": 5182 + }, + { + "epoch": 0.213873070892135, + "grad_norm": 3.6292716592891985, + "learning_rate": 2.7418747882127984e-06, + "loss": 0.5633, + "step": 5183 + }, + { + "epoch": 0.21391433523149295, + "grad_norm": 3.671311152000166, + "learning_rate": 2.7417623398852058e-06, + "loss": 0.5356, + "step": 5184 + }, + { + "epoch": 0.21395559957085086, + "grad_norm": 6.744035417057912, + "learning_rate": 2.741649869376632e-06, + "loss": 0.5486, + "step": 5185 + }, + { + "epoch": 0.2139968639102088, + "grad_norm": 3.0996439364542168, + "learning_rate": 2.741537376689085e-06, + "loss": 0.5728, + "step": 5186 + }, + { + "epoch": 0.21403812824956672, + "grad_norm": 3.1921686254352557, + "learning_rate": 2.741424861824575e-06, + "loss": 0.4943, + "step": 5187 + }, + { + "epoch": 0.21407939258892464, + "grad_norm": 3.265332540610826, + "learning_rate": 2.741312324785111e-06, + "loss": 0.6375, + "step": 5188 + }, + { + "epoch": 0.21412065692828258, + "grad_norm": 4.048743744663245, + "learning_rate": 2.7411997655727035e-06, + "loss": 0.5577, + "step": 5189 + }, + { + "epoch": 0.2141619212676405, + "grad_norm": 5.121830415655035, + "learning_rate": 2.7410871841893635e-06, + "loss": 0.5757, + "step": 5190 + }, + { + "epoch": 0.21420318560699844, + "grad_norm": 4.054633481834815, + "learning_rate": 2.7409745806371016e-06, + "loss": 0.5974, + "step": 5191 + }, + { + "epoch": 0.21424444994635636, + "grad_norm": 2.3374824569922774, + "learning_rate": 2.7408619549179293e-06, + "loss": 0.5347, + "step": 5192 + }, + { + "epoch": 0.21428571428571427, + "grad_norm": 2.2691574845895612, + "learning_rate": 2.7407493070338586e-06, + "loss": 0.5356, + "step": 5193 + }, + { + "epoch": 0.21432697862507222, + "grad_norm": 2.5362597348388385, + "learning_rate": 2.740636636986901e-06, + "loss": 0.5528, + "step": 5194 + }, + { + "epoch": 0.21436824296443013, + "grad_norm": 2.9639436014533924, + "learning_rate": 2.740523944779069e-06, + "loss": 0.5453, + "step": 5195 + }, + { + "epoch": 0.21440950730378808, + "grad_norm": 2.5597866898116024, + "learning_rate": 2.7404112304123766e-06, + "loss": 0.5797, + "step": 5196 + }, + { + "epoch": 0.214450771643146, + "grad_norm": 2.7204349322321026, + "learning_rate": 2.7402984938888368e-06, + "loss": 0.5239, + "step": 5197 + }, + { + "epoch": 0.2144920359825039, + "grad_norm": 3.522841778552995, + "learning_rate": 2.7401857352104628e-06, + "loss": 0.5331, + "step": 5198 + }, + { + "epoch": 0.21453330032186185, + "grad_norm": 2.343814798435982, + "learning_rate": 2.740072954379269e-06, + "loss": 0.4953, + "step": 5199 + }, + { + "epoch": 0.21457456466121977, + "grad_norm": 18.337358559221478, + "learning_rate": 2.7399601513972703e-06, + "loss": 0.6056, + "step": 5200 + }, + { + "epoch": 0.2146158290005777, + "grad_norm": 5.726931097301141, + "learning_rate": 2.739847326266481e-06, + "loss": 0.5652, + "step": 5201 + }, + { + "epoch": 0.21465709333993563, + "grad_norm": 11.034300311641296, + "learning_rate": 2.7397344789889175e-06, + "loss": 0.513, + "step": 5202 + }, + { + "epoch": 0.21469835767929354, + "grad_norm": 3.6694241853434146, + "learning_rate": 2.7396216095665947e-06, + "loss": 0.563, + "step": 5203 + }, + { + "epoch": 0.2147396220186515, + "grad_norm": 3.446847712389118, + "learning_rate": 2.7395087180015283e-06, + "loss": 0.5245, + "step": 5204 + }, + { + "epoch": 0.2147808863580094, + "grad_norm": 3.293469968804378, + "learning_rate": 2.739395804295736e-06, + "loss": 0.5467, + 
"step": 5205 + }, + { + "epoch": 0.21482215069736735, + "grad_norm": 4.4459135294073695, + "learning_rate": 2.739282868451234e-06, + "loss": 0.5888, + "step": 5206 + }, + { + "epoch": 0.21486341503672526, + "grad_norm": 2.0492087306023756, + "learning_rate": 2.73916991047004e-06, + "loss": 0.5905, + "step": 5207 + }, + { + "epoch": 0.21490467937608318, + "grad_norm": 5.976522448252514, + "learning_rate": 2.739056930354171e-06, + "loss": 0.5455, + "step": 5208 + }, + { + "epoch": 0.21494594371544112, + "grad_norm": 3.397951976597265, + "learning_rate": 2.738943928105646e-06, + "loss": 0.4697, + "step": 5209 + }, + { + "epoch": 0.21498720805479904, + "grad_norm": 2.92840624183086, + "learning_rate": 2.7388309037264827e-06, + "loss": 0.5292, + "step": 5210 + }, + { + "epoch": 0.21502847239415698, + "grad_norm": 4.912340484664224, + "learning_rate": 2.7387178572187005e-06, + "loss": 0.588, + "step": 5211 + }, + { + "epoch": 0.2150697367335149, + "grad_norm": 2.7571610900072283, + "learning_rate": 2.738604788584319e-06, + "loss": 0.5399, + "step": 5212 + }, + { + "epoch": 0.21511100107287282, + "grad_norm": 2.5314455925809862, + "learning_rate": 2.7384916978253565e-06, + "loss": 0.531, + "step": 5213 + }, + { + "epoch": 0.21515226541223076, + "grad_norm": 2.598130419734315, + "learning_rate": 2.7383785849438346e-06, + "loss": 0.5086, + "step": 5214 + }, + { + "epoch": 0.21519352975158867, + "grad_norm": 2.5618118736685966, + "learning_rate": 2.738265449941773e-06, + "loss": 0.5664, + "step": 5215 + }, + { + "epoch": 0.2152347940909466, + "grad_norm": 2.6771655010735196, + "learning_rate": 2.7381522928211934e-06, + "loss": 0.587, + "step": 5216 + }, + { + "epoch": 0.21527605843030453, + "grad_norm": 3.950230728758469, + "learning_rate": 2.7380391135841162e-06, + "loss": 0.6535, + "step": 5217 + }, + { + "epoch": 0.21531732276966245, + "grad_norm": 4.066976221102942, + "learning_rate": 2.737925912232563e-06, + "loss": 0.5707, + "step": 5218 + }, + { + "epoch": 0.2153585871090204, + "grad_norm": 2.206838292027185, + "learning_rate": 2.7378126887685563e-06, + "loss": 0.507, + "step": 5219 + }, + { + "epoch": 0.2153998514483783, + "grad_norm": 5.099218174390139, + "learning_rate": 2.737699443194119e-06, + "loss": 0.5783, + "step": 5220 + }, + { + "epoch": 0.21544111578773623, + "grad_norm": 3.380479067991125, + "learning_rate": 2.7375861755112727e-06, + "loss": 0.6408, + "step": 5221 + }, + { + "epoch": 0.21548238012709417, + "grad_norm": 2.2593921466586315, + "learning_rate": 2.7374728857220416e-06, + "loss": 0.5383, + "step": 5222 + }, + { + "epoch": 0.21552364446645209, + "grad_norm": 3.4399052060017468, + "learning_rate": 2.7373595738284495e-06, + "loss": 0.5881, + "step": 5223 + }, + { + "epoch": 0.21556490880581003, + "grad_norm": 2.7282533180078308, + "learning_rate": 2.7372462398325194e-06, + "loss": 0.5782, + "step": 5224 + }, + { + "epoch": 0.21560617314516795, + "grad_norm": 4.997903833530464, + "learning_rate": 2.737132883736276e-06, + "loss": 0.5844, + "step": 5225 + }, + { + "epoch": 0.21564743748452586, + "grad_norm": 2.954131629767615, + "learning_rate": 2.7370195055417455e-06, + "loss": 0.576, + "step": 5226 + }, + { + "epoch": 0.2156887018238838, + "grad_norm": 2.617324929517629, + "learning_rate": 2.7369061052509517e-06, + "loss": 0.5874, + "step": 5227 + }, + { + "epoch": 0.21572996616324172, + "grad_norm": 3.544827211085516, + "learning_rate": 2.7367926828659206e-06, + "loss": 0.5795, + "step": 5228 + }, + { + "epoch": 0.21577123050259966, + "grad_norm": 3.9565662343225982, + 
"learning_rate": 2.7366792383886774e-06, + "loss": 0.592, + "step": 5229 + }, + { + "epoch": 0.21581249484195758, + "grad_norm": 6.491520470942764, + "learning_rate": 2.7365657718212503e-06, + "loss": 0.5666, + "step": 5230 + }, + { + "epoch": 0.2158537591813155, + "grad_norm": 3.02232399372532, + "learning_rate": 2.736452283165665e-06, + "loss": 0.4878, + "step": 5231 + }, + { + "epoch": 0.21589502352067344, + "grad_norm": 3.5038355053955867, + "learning_rate": 2.7363387724239485e-06, + "loss": 0.5047, + "step": 5232 + }, + { + "epoch": 0.21593628786003136, + "grad_norm": 2.5636162826459694, + "learning_rate": 2.736225239598129e-06, + "loss": 0.5742, + "step": 5233 + }, + { + "epoch": 0.2159775521993893, + "grad_norm": 13.925512309302812, + "learning_rate": 2.736111684690234e-06, + "loss": 0.5844, + "step": 5234 + }, + { + "epoch": 0.21601881653874722, + "grad_norm": 10.635298676925244, + "learning_rate": 2.7359981077022917e-06, + "loss": 0.5301, + "step": 5235 + }, + { + "epoch": 0.21606008087810513, + "grad_norm": 6.140586861321886, + "learning_rate": 2.7358845086363314e-06, + "loss": 0.5745, + "step": 5236 + }, + { + "epoch": 0.21610134521746308, + "grad_norm": 47.668295497868066, + "learning_rate": 2.7357708874943823e-06, + "loss": 0.5523, + "step": 5237 + }, + { + "epoch": 0.216142609556821, + "grad_norm": 5.683578220410356, + "learning_rate": 2.7356572442784733e-06, + "loss": 0.5709, + "step": 5238 + }, + { + "epoch": 0.21618387389617894, + "grad_norm": 6.857528180828879, + "learning_rate": 2.7355435789906353e-06, + "loss": 0.5819, + "step": 5239 + }, + { + "epoch": 0.21622513823553685, + "grad_norm": 6.688169766983359, + "learning_rate": 2.7354298916328973e-06, + "loss": 0.5826, + "step": 5240 + }, + { + "epoch": 0.21626640257489477, + "grad_norm": 3.9551572968559996, + "learning_rate": 2.7353161822072918e-06, + "loss": 0.5241, + "step": 5241 + }, + { + "epoch": 0.2163076669142527, + "grad_norm": 20.791497833330347, + "learning_rate": 2.735202450715849e-06, + "loss": 0.5345, + "step": 5242 + }, + { + "epoch": 0.21634893125361063, + "grad_norm": 3.3736077429091345, + "learning_rate": 2.7350886971605994e-06, + "loss": 0.4987, + "step": 5243 + }, + { + "epoch": 0.21639019559296854, + "grad_norm": 3.298894572957954, + "learning_rate": 2.7349749215435767e-06, + "loss": 0.5387, + "step": 5244 + }, + { + "epoch": 0.2164314599323265, + "grad_norm": 6.659274585718046, + "learning_rate": 2.734861123866813e-06, + "loss": 0.523, + "step": 5245 + }, + { + "epoch": 0.2164727242716844, + "grad_norm": 5.595489326382794, + "learning_rate": 2.734747304132339e-06, + "loss": 0.5634, + "step": 5246 + }, + { + "epoch": 0.21651398861104235, + "grad_norm": 2.482970856722814, + "learning_rate": 2.7346334623421904e-06, + "loss": 0.5985, + "step": 5247 + }, + { + "epoch": 0.21655525295040026, + "grad_norm": 4.686218882028268, + "learning_rate": 2.7345195984983997e-06, + "loss": 0.5899, + "step": 5248 + }, + { + "epoch": 0.21659651728975818, + "grad_norm": 3.941206906508189, + "learning_rate": 2.7344057126029998e-06, + "loss": 0.5728, + "step": 5249 + }, + { + "epoch": 0.21663778162911612, + "grad_norm": 3.610342004412427, + "learning_rate": 2.734291804658026e-06, + "loss": 0.5602, + "step": 5250 + }, + { + "epoch": 0.21667904596847404, + "grad_norm": 3.3254927045778704, + "learning_rate": 2.7341778746655134e-06, + "loss": 0.5993, + "step": 5251 + }, + { + "epoch": 0.21672031030783198, + "grad_norm": 2.7706822329271303, + "learning_rate": 2.7340639226274964e-06, + "loss": 0.5218, + "step": 5252 + }, + { + 
"epoch": 0.2167615746471899, + "grad_norm": 4.911091154980985, + "learning_rate": 2.7339499485460107e-06, + "loss": 0.5957, + "step": 5253 + }, + { + "epoch": 0.2168028389865478, + "grad_norm": 2.128776030987424, + "learning_rate": 2.7338359524230916e-06, + "loss": 0.5276, + "step": 5254 + }, + { + "epoch": 0.21684410332590576, + "grad_norm": 2.117901615422596, + "learning_rate": 2.733721934260776e-06, + "loss": 0.52, + "step": 5255 + }, + { + "epoch": 0.21688536766526367, + "grad_norm": 2.7368070591098173, + "learning_rate": 2.7336078940611007e-06, + "loss": 0.5834, + "step": 5256 + }, + { + "epoch": 0.21692663200462162, + "grad_norm": 2.9786236389098866, + "learning_rate": 2.733493831826102e-06, + "loss": 0.5571, + "step": 5257 + }, + { + "epoch": 0.21696789634397953, + "grad_norm": 3.698812710758454, + "learning_rate": 2.7333797475578176e-06, + "loss": 0.5725, + "step": 5258 + }, + { + "epoch": 0.21700916068333745, + "grad_norm": 4.289184702405655, + "learning_rate": 2.7332656412582857e-06, + "loss": 0.4566, + "step": 5259 + }, + { + "epoch": 0.2170504250226954, + "grad_norm": 3.787220957573007, + "learning_rate": 2.7331515129295444e-06, + "loss": 0.5409, + "step": 5260 + }, + { + "epoch": 0.2170916893620533, + "grad_norm": 3.5071811975373977, + "learning_rate": 2.733037362573632e-06, + "loss": 0.5358, + "step": 5261 + }, + { + "epoch": 0.21713295370141125, + "grad_norm": 2.860853514910588, + "learning_rate": 2.732923190192588e-06, + "loss": 0.5753, + "step": 5262 + }, + { + "epoch": 0.21717421804076917, + "grad_norm": 5.1602942617068726, + "learning_rate": 2.7328089957884513e-06, + "loss": 0.5464, + "step": 5263 + }, + { + "epoch": 0.21721548238012708, + "grad_norm": 3.3182170840590675, + "learning_rate": 2.732694779363262e-06, + "loss": 0.538, + "step": 5264 + }, + { + "epoch": 0.21725674671948503, + "grad_norm": 3.4114559243680205, + "learning_rate": 2.73258054091906e-06, + "loss": 0.4971, + "step": 5265 + }, + { + "epoch": 0.21729801105884294, + "grad_norm": 4.069729933928012, + "learning_rate": 2.7324662804578863e-06, + "loss": 0.564, + "step": 5266 + }, + { + "epoch": 0.2173392753982009, + "grad_norm": 3.035557247594782, + "learning_rate": 2.7323519979817816e-06, + "loss": 0.5594, + "step": 5267 + }, + { + "epoch": 0.2173805397375588, + "grad_norm": 2.282095927079191, + "learning_rate": 2.7322376934927874e-06, + "loss": 0.5529, + "step": 5268 + }, + { + "epoch": 0.21742180407691672, + "grad_norm": 3.252244002628042, + "learning_rate": 2.7321233669929457e-06, + "loss": 0.6073, + "step": 5269 + }, + { + "epoch": 0.21746306841627466, + "grad_norm": 3.264992232826417, + "learning_rate": 2.7320090184842976e-06, + "loss": 0.5621, + "step": 5270 + }, + { + "epoch": 0.21750433275563258, + "grad_norm": 3.6654979377181744, + "learning_rate": 2.731894647968887e-06, + "loss": 0.6056, + "step": 5271 + }, + { + "epoch": 0.21754559709499052, + "grad_norm": 2.9702851750866857, + "learning_rate": 2.7317802554487562e-06, + "loss": 0.5474, + "step": 5272 + }, + { + "epoch": 0.21758686143434844, + "grad_norm": 1.9372012069369309, + "learning_rate": 2.7316658409259483e-06, + "loss": 0.5363, + "step": 5273 + }, + { + "epoch": 0.21762812577370635, + "grad_norm": 3.2607169408557724, + "learning_rate": 2.7315514044025077e-06, + "loss": 0.5187, + "step": 5274 + }, + { + "epoch": 0.2176693901130643, + "grad_norm": 9.063199501715212, + "learning_rate": 2.731436945880478e-06, + "loss": 0.4909, + "step": 5275 + }, + { + "epoch": 0.21771065445242221, + "grad_norm": 2.251293333228359, + "learning_rate": 
2.731322465361904e-06, + "loss": 0.5497, + "step": 5276 + }, + { + "epoch": 0.21775191879178013, + "grad_norm": 6.44183621982247, + "learning_rate": 2.73120796284883e-06, + "loss": 0.5882, + "step": 5277 + }, + { + "epoch": 0.21779318313113807, + "grad_norm": 2.5788011350431344, + "learning_rate": 2.7310934383433026e-06, + "loss": 0.5368, + "step": 5278 + }, + { + "epoch": 0.217834447470496, + "grad_norm": 2.106419873856462, + "learning_rate": 2.730978891847366e-06, + "loss": 0.5447, + "step": 5279 + }, + { + "epoch": 0.21787571180985393, + "grad_norm": 5.576962924425348, + "learning_rate": 2.730864323363067e-06, + "loss": 0.5221, + "step": 5280 + }, + { + "epoch": 0.21791697614921185, + "grad_norm": 2.874198153425923, + "learning_rate": 2.730749732892452e-06, + "loss": 0.5511, + "step": 5281 + }, + { + "epoch": 0.21795824048856977, + "grad_norm": 28.202176539091692, + "learning_rate": 2.730635120437568e-06, + "loss": 0.514, + "step": 5282 + }, + { + "epoch": 0.2179995048279277, + "grad_norm": 2.9366559656728906, + "learning_rate": 2.7305204860004624e-06, + "loss": 0.5876, + "step": 5283 + }, + { + "epoch": 0.21804076916728563, + "grad_norm": 11.44614363729022, + "learning_rate": 2.730405829583182e-06, + "loss": 0.5704, + "step": 5284 + }, + { + "epoch": 0.21808203350664357, + "grad_norm": 6.194748157520918, + "learning_rate": 2.7302911511877763e-06, + "loss": 0.5482, + "step": 5285 + }, + { + "epoch": 0.21812329784600148, + "grad_norm": 2.20503663047506, + "learning_rate": 2.7301764508162925e-06, + "loss": 0.5768, + "step": 5286 + }, + { + "epoch": 0.2181645621853594, + "grad_norm": 4.97188016791159, + "learning_rate": 2.73006172847078e-06, + "loss": 0.5109, + "step": 5287 + }, + { + "epoch": 0.21820582652471734, + "grad_norm": 3.398249702039399, + "learning_rate": 2.7299469841532876e-06, + "loss": 0.5069, + "step": 5288 + }, + { + "epoch": 0.21824709086407526, + "grad_norm": 2.7297871568330794, + "learning_rate": 2.729832217865866e-06, + "loss": 0.5616, + "step": 5289 + }, + { + "epoch": 0.2182883552034332, + "grad_norm": 2.8475961421733893, + "learning_rate": 2.7297174296105633e-06, + "loss": 0.5778, + "step": 5290 + }, + { + "epoch": 0.21832961954279112, + "grad_norm": 2.835312553357934, + "learning_rate": 2.7296026193894314e-06, + "loss": 0.5592, + "step": 5291 + }, + { + "epoch": 0.21837088388214904, + "grad_norm": 5.995470111512106, + "learning_rate": 2.729487787204521e-06, + "loss": 0.5077, + "step": 5292 + }, + { + "epoch": 0.21841214822150698, + "grad_norm": 2.6335900122499636, + "learning_rate": 2.729372933057883e-06, + "loss": 0.5715, + "step": 5293 + }, + { + "epoch": 0.2184534125608649, + "grad_norm": 5.230578036852608, + "learning_rate": 2.729258056951569e-06, + "loss": 0.5106, + "step": 5294 + }, + { + "epoch": 0.21849467690022284, + "grad_norm": 11.284884199621377, + "learning_rate": 2.7291431588876307e-06, + "loss": 0.5284, + "step": 5295 + }, + { + "epoch": 0.21853594123958076, + "grad_norm": 5.234395699615699, + "learning_rate": 2.729028238868121e-06, + "loss": 0.5832, + "step": 5296 + }, + { + "epoch": 0.21857720557893867, + "grad_norm": 4.418239646422891, + "learning_rate": 2.728913296895092e-06, + "loss": 0.5449, + "step": 5297 + }, + { + "epoch": 0.21861846991829662, + "grad_norm": 3.163679506554846, + "learning_rate": 2.7287983329705976e-06, + "loss": 0.5288, + "step": 5298 + }, + { + "epoch": 0.21865973425765453, + "grad_norm": 2.631038186391893, + "learning_rate": 2.728683347096691e-06, + "loss": 0.5019, + "step": 5299 + }, + { + "epoch": 0.21870099859701247, + 
"grad_norm": 4.590318527181422, + "learning_rate": 2.728568339275426e-06, + "loss": 0.56, + "step": 5300 + }, + { + "epoch": 0.2187422629363704, + "grad_norm": 3.422727841425796, + "learning_rate": 2.7284533095088573e-06, + "loss": 0.5662, + "step": 5301 + }, + { + "epoch": 0.2187835272757283, + "grad_norm": 4.196587809410169, + "learning_rate": 2.728338257799039e-06, + "loss": 0.5427, + "step": 5302 + }, + { + "epoch": 0.21882479161508625, + "grad_norm": 2.46388384232192, + "learning_rate": 2.728223184148027e-06, + "loss": 0.5012, + "step": 5303 + }, + { + "epoch": 0.21886605595444417, + "grad_norm": 2.2243780421783694, + "learning_rate": 2.728108088557876e-06, + "loss": 0.5802, + "step": 5304 + }, + { + "epoch": 0.2189073202938021, + "grad_norm": 16.681925911184685, + "learning_rate": 2.7279929710306427e-06, + "loss": 0.5706, + "step": 5305 + }, + { + "epoch": 0.21894858463316003, + "grad_norm": 3.015292715304323, + "learning_rate": 2.7278778315683823e-06, + "loss": 0.5646, + "step": 5306 + }, + { + "epoch": 0.21898984897251794, + "grad_norm": 2.9734507209455847, + "learning_rate": 2.7277626701731524e-06, + "loss": 0.5938, + "step": 5307 + }, + { + "epoch": 0.21903111331187589, + "grad_norm": 3.2292665571981525, + "learning_rate": 2.72764748684701e-06, + "loss": 0.5894, + "step": 5308 + }, + { + "epoch": 0.2190723776512338, + "grad_norm": 2.006638653282212, + "learning_rate": 2.7275322815920127e-06, + "loss": 0.5321, + "step": 5309 + }, + { + "epoch": 0.21911364199059172, + "grad_norm": 2.520601829642779, + "learning_rate": 2.7274170544102174e-06, + "loss": 0.5592, + "step": 5310 + }, + { + "epoch": 0.21915490632994966, + "grad_norm": 2.8321945541081672, + "learning_rate": 2.727301805303683e-06, + "loss": 0.5371, + "step": 5311 + }, + { + "epoch": 0.21919617066930758, + "grad_norm": 2.5900135578994514, + "learning_rate": 2.7271865342744687e-06, + "loss": 0.5249, + "step": 5312 + }, + { + "epoch": 0.21923743500866552, + "grad_norm": 2.431591716632457, + "learning_rate": 2.727071241324633e-06, + "loss": 0.528, + "step": 5313 + }, + { + "epoch": 0.21927869934802344, + "grad_norm": 4.409492877076678, + "learning_rate": 2.7269559264562347e-06, + "loss": 0.5634, + "step": 5314 + }, + { + "epoch": 0.21931996368738135, + "grad_norm": 2.3005942261061594, + "learning_rate": 2.7268405896713342e-06, + "loss": 0.5277, + "step": 5315 + }, + { + "epoch": 0.2193612280267393, + "grad_norm": 2.3894346825946764, + "learning_rate": 2.726725230971992e-06, + "loss": 0.5113, + "step": 5316 + }, + { + "epoch": 0.2194024923660972, + "grad_norm": 12.939074257272164, + "learning_rate": 2.7266098503602683e-06, + "loss": 0.5356, + "step": 5317 + }, + { + "epoch": 0.21944375670545516, + "grad_norm": 7.6800433357654345, + "learning_rate": 2.7264944478382237e-06, + "loss": 0.5424, + "step": 5318 + }, + { + "epoch": 0.21948502104481307, + "grad_norm": 3.206587519219243, + "learning_rate": 2.726379023407921e-06, + "loss": 0.5853, + "step": 5319 + }, + { + "epoch": 0.219526285384171, + "grad_norm": 4.16918666788814, + "learning_rate": 2.7262635770714205e-06, + "loss": 0.6089, + "step": 5320 + }, + { + "epoch": 0.21956754972352893, + "grad_norm": 2.4101767783984194, + "learning_rate": 2.726148108830785e-06, + "loss": 0.5201, + "step": 5321 + }, + { + "epoch": 0.21960881406288685, + "grad_norm": 2.685952757301672, + "learning_rate": 2.7260326186880764e-06, + "loss": 0.5245, + "step": 5322 + }, + { + "epoch": 0.2196500784022448, + "grad_norm": 3.6223068025639193, + "learning_rate": 2.7259171066453587e-06, + "loss": 0.5419, + 
"step": 5323 + }, + { + "epoch": 0.2196913427416027, + "grad_norm": 5.705041334758281, + "learning_rate": 2.725801572704694e-06, + "loss": 0.5584, + "step": 5324 + }, + { + "epoch": 0.21973260708096062, + "grad_norm": 2.0382998944947106, + "learning_rate": 2.7256860168681475e-06, + "loss": 0.5728, + "step": 5325 + }, + { + "epoch": 0.21977387142031857, + "grad_norm": 4.649591334909118, + "learning_rate": 2.7255704391377824e-06, + "loss": 0.6186, + "step": 5326 + }, + { + "epoch": 0.21981513575967648, + "grad_norm": 3.2543601084151628, + "learning_rate": 2.725454839515663e-06, + "loss": 0.5891, + "step": 5327 + }, + { + "epoch": 0.21985640009903443, + "grad_norm": 2.8441678042082996, + "learning_rate": 2.7253392180038546e-06, + "loss": 0.5583, + "step": 5328 + }, + { + "epoch": 0.21989766443839234, + "grad_norm": 25.834797591056837, + "learning_rate": 2.7252235746044227e-06, + "loss": 0.5886, + "step": 5329 + }, + { + "epoch": 0.21993892877775026, + "grad_norm": 2.1941981012712612, + "learning_rate": 2.725107909319433e-06, + "loss": 0.5694, + "step": 5330 + }, + { + "epoch": 0.2199801931171082, + "grad_norm": 3.3425625700977433, + "learning_rate": 2.7249922221509507e-06, + "loss": 0.5529, + "step": 5331 + }, + { + "epoch": 0.22002145745646612, + "grad_norm": 4.135701669084892, + "learning_rate": 2.724876513101043e-06, + "loss": 0.547, + "step": 5332 + }, + { + "epoch": 0.22006272179582406, + "grad_norm": 1.809307294834948, + "learning_rate": 2.724760782171777e-06, + "loss": 0.5329, + "step": 5333 + }, + { + "epoch": 0.22010398613518198, + "grad_norm": 1.7704818863304155, + "learning_rate": 2.724645029365219e-06, + "loss": 0.5171, + "step": 5334 + }, + { + "epoch": 0.2201452504745399, + "grad_norm": 3.8639147549169333, + "learning_rate": 2.7245292546834374e-06, + "loss": 0.5495, + "step": 5335 + }, + { + "epoch": 0.22018651481389784, + "grad_norm": 2.549208118996775, + "learning_rate": 2.7244134581285e-06, + "loss": 0.6138, + "step": 5336 + }, + { + "epoch": 0.22022777915325575, + "grad_norm": 2.472402944006269, + "learning_rate": 2.7242976397024753e-06, + "loss": 0.5301, + "step": 5337 + }, + { + "epoch": 0.22026904349261367, + "grad_norm": 3.60115368801028, + "learning_rate": 2.724181799407432e-06, + "loss": 0.5406, + "step": 5338 + }, + { + "epoch": 0.2203103078319716, + "grad_norm": 3.4096647535227103, + "learning_rate": 2.7240659372454395e-06, + "loss": 0.6064, + "step": 5339 + }, + { + "epoch": 0.22035157217132953, + "grad_norm": 3.233283576628211, + "learning_rate": 2.723950053218567e-06, + "loss": 0.5639, + "step": 5340 + }, + { + "epoch": 0.22039283651068747, + "grad_norm": 2.8919098717254004, + "learning_rate": 2.7238341473288846e-06, + "loss": 0.5718, + "step": 5341 + }, + { + "epoch": 0.2204341008500454, + "grad_norm": 4.213421910300827, + "learning_rate": 2.723718219578463e-06, + "loss": 0.5933, + "step": 5342 + }, + { + "epoch": 0.2204753651894033, + "grad_norm": 15.798418663105647, + "learning_rate": 2.723602269969372e-06, + "loss": 0.5564, + "step": 5343 + }, + { + "epoch": 0.22051662952876125, + "grad_norm": 2.508908428842884, + "learning_rate": 2.7234862985036842e-06, + "loss": 0.5636, + "step": 5344 + }, + { + "epoch": 0.22055789386811916, + "grad_norm": 3.053268445701213, + "learning_rate": 2.723370305183471e-06, + "loss": 0.5799, + "step": 5345 + }, + { + "epoch": 0.2205991582074771, + "grad_norm": 3.04337448333138, + "learning_rate": 2.723254290010803e-06, + "loss": 0.5538, + "step": 5346 + }, + { + "epoch": 0.22064042254683502, + "grad_norm": 7.038622708043197, + 
"learning_rate": 2.723138252987753e-06, + "loss": 0.5792, + "step": 5347 + }, + { + "epoch": 0.22068168688619294, + "grad_norm": 2.9013147446659326, + "learning_rate": 2.7230221941163945e-06, + "loss": 0.5023, + "step": 5348 + }, + { + "epoch": 0.22072295122555088, + "grad_norm": 3.1860626014317837, + "learning_rate": 2.7229061133988e-06, + "loss": 0.612, + "step": 5349 + }, + { + "epoch": 0.2207642155649088, + "grad_norm": 6.796354871298217, + "learning_rate": 2.7227900108370426e-06, + "loss": 0.5535, + "step": 5350 + }, + { + "epoch": 0.22080547990426674, + "grad_norm": 3.3477946661260303, + "learning_rate": 2.722673886433197e-06, + "loss": 0.5664, + "step": 5351 + }, + { + "epoch": 0.22084674424362466, + "grad_norm": 2.623712661713723, + "learning_rate": 2.7225577401893373e-06, + "loss": 0.5635, + "step": 5352 + }, + { + "epoch": 0.22088800858298258, + "grad_norm": 5.667550288249555, + "learning_rate": 2.7224415721075376e-06, + "loss": 0.5657, + "step": 5353 + }, + { + "epoch": 0.22092927292234052, + "grad_norm": 2.753322082907262, + "learning_rate": 2.722325382189874e-06, + "loss": 0.4975, + "step": 5354 + }, + { + "epoch": 0.22097053726169844, + "grad_norm": 3.7224047966146174, + "learning_rate": 2.722209170438421e-06, + "loss": 0.4632, + "step": 5355 + }, + { + "epoch": 0.22101180160105638, + "grad_norm": 2.2827087147055662, + "learning_rate": 2.722092936855254e-06, + "loss": 0.5333, + "step": 5356 + }, + { + "epoch": 0.2210530659404143, + "grad_norm": 5.638325096039393, + "learning_rate": 2.7219766814424506e-06, + "loss": 0.5629, + "step": 5357 + }, + { + "epoch": 0.2210943302797722, + "grad_norm": 4.500197921966973, + "learning_rate": 2.7218604042020865e-06, + "loss": 0.5757, + "step": 5358 + }, + { + "epoch": 0.22113559461913015, + "grad_norm": 2.176786732138929, + "learning_rate": 2.7217441051362392e-06, + "loss": 0.5938, + "step": 5359 + }, + { + "epoch": 0.22117685895848807, + "grad_norm": 2.825216552395488, + "learning_rate": 2.721627784246986e-06, + "loss": 0.5241, + "step": 5360 + }, + { + "epoch": 0.22121812329784601, + "grad_norm": 3.1128772908256854, + "learning_rate": 2.721511441536404e-06, + "loss": 0.605, + "step": 5361 + }, + { + "epoch": 0.22125938763720393, + "grad_norm": 3.213908691295237, + "learning_rate": 2.7213950770065723e-06, + "loss": 0.5332, + "step": 5362 + }, + { + "epoch": 0.22130065197656185, + "grad_norm": 4.391565146594457, + "learning_rate": 2.721278690659569e-06, + "loss": 0.4784, + "step": 5363 + }, + { + "epoch": 0.2213419163159198, + "grad_norm": 4.00735210520296, + "learning_rate": 2.721162282497473e-06, + "loss": 0.5286, + "step": 5364 + }, + { + "epoch": 0.2213831806552777, + "grad_norm": 2.597944385673758, + "learning_rate": 2.7210458525223637e-06, + "loss": 0.5409, + "step": 5365 + }, + { + "epoch": 0.22142444499463565, + "grad_norm": 2.4546915154204747, + "learning_rate": 2.720929400736321e-06, + "loss": 0.5721, + "step": 5366 + }, + { + "epoch": 0.22146570933399357, + "grad_norm": 3.381238259867935, + "learning_rate": 2.7208129271414247e-06, + "loss": 0.6061, + "step": 5367 + }, + { + "epoch": 0.22150697367335148, + "grad_norm": 7.668604513729705, + "learning_rate": 2.7206964317397557e-06, + "loss": 0.5444, + "step": 5368 + }, + { + "epoch": 0.22154823801270943, + "grad_norm": 3.3791037537821085, + "learning_rate": 2.720579914533395e-06, + "loss": 0.6193, + "step": 5369 + }, + { + "epoch": 0.22158950235206734, + "grad_norm": 11.96249179077948, + "learning_rate": 2.7204633755244234e-06, + "loss": 0.5405, + "step": 5370 + }, + { + "epoch": 
0.22163076669142526, + "grad_norm": 3.8133802746713825, + "learning_rate": 2.7203468147149227e-06, + "loss": 0.5264, + "step": 5371 + }, + { + "epoch": 0.2216720310307832, + "grad_norm": 3.2215013871034595, + "learning_rate": 2.7202302321069745e-06, + "loss": 0.5392, + "step": 5372 + }, + { + "epoch": 0.22171329537014112, + "grad_norm": 2.92668421057239, + "learning_rate": 2.7201136277026625e-06, + "loss": 0.5606, + "step": 5373 + }, + { + "epoch": 0.22175455970949906, + "grad_norm": 2.5096931067057264, + "learning_rate": 2.719997001504069e-06, + "loss": 0.5074, + "step": 5374 + }, + { + "epoch": 0.22179582404885698, + "grad_norm": 2.4649844198659734, + "learning_rate": 2.7198803535132768e-06, + "loss": 0.5421, + "step": 5375 + }, + { + "epoch": 0.2218370883882149, + "grad_norm": 4.761292315904064, + "learning_rate": 2.71976368373237e-06, + "loss": 0.5436, + "step": 5376 + }, + { + "epoch": 0.22187835272757284, + "grad_norm": 2.852179476252425, + "learning_rate": 2.719646992163432e-06, + "loss": 0.5773, + "step": 5377 + }, + { + "epoch": 0.22191961706693075, + "grad_norm": 2.2502902799150895, + "learning_rate": 2.719530278808548e-06, + "loss": 0.5851, + "step": 5378 + }, + { + "epoch": 0.2219608814062887, + "grad_norm": 3.554281974769343, + "learning_rate": 2.719413543669802e-06, + "loss": 0.5583, + "step": 5379 + }, + { + "epoch": 0.2220021457456466, + "grad_norm": 3.981650384193682, + "learning_rate": 2.71929678674928e-06, + "loss": 0.5995, + "step": 5380 + }, + { + "epoch": 0.22204341008500453, + "grad_norm": 3.623780746197079, + "learning_rate": 2.7191800080490674e-06, + "loss": 0.5168, + "step": 5381 + }, + { + "epoch": 0.22208467442436247, + "grad_norm": 3.8094089829609667, + "learning_rate": 2.719063207571249e-06, + "loss": 0.6055, + "step": 5382 + }, + { + "epoch": 0.2221259387637204, + "grad_norm": 2.9651721698492857, + "learning_rate": 2.718946385317913e-06, + "loss": 0.5541, + "step": 5383 + }, + { + "epoch": 0.22216720310307833, + "grad_norm": 2.6597330548426976, + "learning_rate": 2.718829541291145e-06, + "loss": 0.6267, + "step": 5384 + }, + { + "epoch": 0.22220846744243625, + "grad_norm": 3.0905738354051113, + "learning_rate": 2.7187126754930323e-06, + "loss": 0.5605, + "step": 5385 + }, + { + "epoch": 0.22224973178179416, + "grad_norm": 3.0805911640629664, + "learning_rate": 2.7185957879256623e-06, + "loss": 0.4856, + "step": 5386 + }, + { + "epoch": 0.2222909961211521, + "grad_norm": 7.191382980049549, + "learning_rate": 2.7184788785911237e-06, + "loss": 0.5705, + "step": 5387 + }, + { + "epoch": 0.22233226046051002, + "grad_norm": 4.340605056985127, + "learning_rate": 2.7183619474915034e-06, + "loss": 0.5436, + "step": 5388 + }, + { + "epoch": 0.22237352479986797, + "grad_norm": 3.8565882413550074, + "learning_rate": 2.718244994628891e-06, + "loss": 0.5603, + "step": 5389 + }, + { + "epoch": 0.22241478913922588, + "grad_norm": 10.691466107365473, + "learning_rate": 2.7181280200053756e-06, + "loss": 0.572, + "step": 5390 + }, + { + "epoch": 0.2224560534785838, + "grad_norm": 3.8585226693312977, + "learning_rate": 2.7180110236230464e-06, + "loss": 0.5226, + "step": 5391 + }, + { + "epoch": 0.22249731781794174, + "grad_norm": 3.3787799094471813, + "learning_rate": 2.7178940054839934e-06, + "loss": 0.5342, + "step": 5392 + }, + { + "epoch": 0.22253858215729966, + "grad_norm": 4.245903048798248, + "learning_rate": 2.717776965590307e-06, + "loss": 0.5113, + "step": 5393 + }, + { + "epoch": 0.2225798464966576, + "grad_norm": 2.938998572337085, + "learning_rate": 
2.717659903944077e-06, + "loss": 0.5605, + "step": 5394 + }, + { + "epoch": 0.22262111083601552, + "grad_norm": 3.7255422264310756, + "learning_rate": 2.717542820547395e-06, + "loss": 0.5267, + "step": 5395 + }, + { + "epoch": 0.22266237517537343, + "grad_norm": 3.683201421265645, + "learning_rate": 2.7174257154023527e-06, + "loss": 0.598, + "step": 5396 + }, + { + "epoch": 0.22270363951473138, + "grad_norm": 2.655341975662013, + "learning_rate": 2.7173085885110417e-06, + "loss": 0.5706, + "step": 5397 + }, + { + "epoch": 0.2227449038540893, + "grad_norm": 3.4036842292004295, + "learning_rate": 2.717191439875554e-06, + "loss": 0.5162, + "step": 5398 + }, + { + "epoch": 0.2227861681934472, + "grad_norm": 2.6474322825651107, + "learning_rate": 2.717074269497982e-06, + "loss": 0.5748, + "step": 5399 + }, + { + "epoch": 0.22282743253280515, + "grad_norm": 3.2951550377222385, + "learning_rate": 2.716957077380419e-06, + "loss": 0.5944, + "step": 5400 + }, + { + "epoch": 0.22286869687216307, + "grad_norm": 2.6379625181298016, + "learning_rate": 2.7168398635249583e-06, + "loss": 0.5274, + "step": 5401 + }, + { + "epoch": 0.222909961211521, + "grad_norm": 2.585704660184201, + "learning_rate": 2.7167226279336935e-06, + "loss": 0.5711, + "step": 5402 + }, + { + "epoch": 0.22295122555087893, + "grad_norm": 3.0999424147929946, + "learning_rate": 2.716605370608719e-06, + "loss": 0.5854, + "step": 5403 + }, + { + "epoch": 0.22299248989023684, + "grad_norm": 2.9493658200314723, + "learning_rate": 2.716488091552129e-06, + "loss": 0.4817, + "step": 5404 + }, + { + "epoch": 0.2230337542295948, + "grad_norm": 2.700495841385269, + "learning_rate": 2.7163707907660186e-06, + "loss": 0.5394, + "step": 5405 + }, + { + "epoch": 0.2230750185689527, + "grad_norm": 5.1848501608216635, + "learning_rate": 2.7162534682524822e-06, + "loss": 0.6094, + "step": 5406 + }, + { + "epoch": 0.22311628290831065, + "grad_norm": 3.202979581115835, + "learning_rate": 2.716136124013617e-06, + "loss": 0.5723, + "step": 5407 + }, + { + "epoch": 0.22315754724766856, + "grad_norm": 2.924213346824425, + "learning_rate": 2.7160187580515186e-06, + "loss": 0.5672, + "step": 5408 + }, + { + "epoch": 0.22319881158702648, + "grad_norm": 7.261426984178607, + "learning_rate": 2.7159013703682825e-06, + "loss": 0.5238, + "step": 5409 + }, + { + "epoch": 0.22324007592638442, + "grad_norm": 4.377494769591876, + "learning_rate": 2.7157839609660065e-06, + "loss": 0.5645, + "step": 5410 + }, + { + "epoch": 0.22328134026574234, + "grad_norm": 4.643838368564887, + "learning_rate": 2.715666529846788e-06, + "loss": 0.5601, + "step": 5411 + }, + { + "epoch": 0.22332260460510028, + "grad_norm": 3.4175161285106457, + "learning_rate": 2.715549077012723e-06, + "loss": 0.5694, + "step": 5412 + }, + { + "epoch": 0.2233638689444582, + "grad_norm": 2.339060515018584, + "learning_rate": 2.7154316024659117e-06, + "loss": 0.6319, + "step": 5413 + }, + { + "epoch": 0.22340513328381612, + "grad_norm": 4.288282888267968, + "learning_rate": 2.7153141062084508e-06, + "loss": 0.6182, + "step": 5414 + }, + { + "epoch": 0.22344639762317406, + "grad_norm": 2.5024877265181824, + "learning_rate": 2.71519658824244e-06, + "loss": 0.6368, + "step": 5415 + }, + { + "epoch": 0.22348766196253197, + "grad_norm": 4.167897111372908, + "learning_rate": 2.715079048569978e-06, + "loss": 0.6026, + "step": 5416 + }, + { + "epoch": 0.22352892630188992, + "grad_norm": 2.9368187861708885, + "learning_rate": 2.714961487193165e-06, + "loss": 0.5588, + "step": 5417 + }, + { + "epoch": 
0.22357019064124783, + "grad_norm": 3.403882842178314, + "learning_rate": 2.7148439041141e-06, + "loss": 0.5136, + "step": 5418 + }, + { + "epoch": 0.22361145498060575, + "grad_norm": 6.4881307624138795, + "learning_rate": 2.7147262993348836e-06, + "loss": 0.5582, + "step": 5419 + }, + { + "epoch": 0.2236527193199637, + "grad_norm": 4.043157215625413, + "learning_rate": 2.7146086728576174e-06, + "loss": 0.5825, + "step": 5420 + }, + { + "epoch": 0.2236939836593216, + "grad_norm": 4.888587518935057, + "learning_rate": 2.7144910246844014e-06, + "loss": 0.5173, + "step": 5421 + }, + { + "epoch": 0.22373524799867955, + "grad_norm": 3.0605344393820983, + "learning_rate": 2.714373354817337e-06, + "loss": 0.512, + "step": 5422 + }, + { + "epoch": 0.22377651233803747, + "grad_norm": 2.743662970306256, + "learning_rate": 2.714255663258528e-06, + "loss": 0.5712, + "step": 5423 + }, + { + "epoch": 0.22381777667739539, + "grad_norm": 3.0342973297043923, + "learning_rate": 2.714137950010074e-06, + "loss": 0.5649, + "step": 5424 + }, + { + "epoch": 0.22385904101675333, + "grad_norm": 8.853417243458985, + "learning_rate": 2.7140202150740793e-06, + "loss": 0.5922, + "step": 5425 + }, + { + "epoch": 0.22390030535611125, + "grad_norm": 3.5482894906932634, + "learning_rate": 2.7139024584526466e-06, + "loss": 0.5551, + "step": 5426 + }, + { + "epoch": 0.2239415696954692, + "grad_norm": 2.5901481550636163, + "learning_rate": 2.7137846801478793e-06, + "loss": 0.4921, + "step": 5427 + }, + { + "epoch": 0.2239828340348271, + "grad_norm": 4.214473256961926, + "learning_rate": 2.713666880161881e-06, + "loss": 0.5362, + "step": 5428 + }, + { + "epoch": 0.22402409837418502, + "grad_norm": 2.0843036679250413, + "learning_rate": 2.713549058496756e-06, + "loss": 0.4878, + "step": 5429 + }, + { + "epoch": 0.22406536271354296, + "grad_norm": 8.331502623387951, + "learning_rate": 2.7134312151546093e-06, + "loss": 0.5809, + "step": 5430 + }, + { + "epoch": 0.22410662705290088, + "grad_norm": 11.854291929141999, + "learning_rate": 2.7133133501375457e-06, + "loss": 0.5531, + "step": 5431 + }, + { + "epoch": 0.2241478913922588, + "grad_norm": 2.914421501589817, + "learning_rate": 2.71319546344767e-06, + "loss": 0.6054, + "step": 5432 + }, + { + "epoch": 0.22418915573161674, + "grad_norm": 3.6007601253955497, + "learning_rate": 2.713077555087089e-06, + "loss": 0.5055, + "step": 5433 + }, + { + "epoch": 0.22423042007097466, + "grad_norm": 4.600665503286852, + "learning_rate": 2.7129596250579075e-06, + "loss": 0.5934, + "step": 5434 + }, + { + "epoch": 0.2242716844103326, + "grad_norm": 2.4708279289858304, + "learning_rate": 2.7128416733622328e-06, + "loss": 0.6349, + "step": 5435 + }, + { + "epoch": 0.22431294874969052, + "grad_norm": 2.6106027061730965, + "learning_rate": 2.7127237000021717e-06, + "loss": 0.5347, + "step": 5436 + }, + { + "epoch": 0.22435421308904843, + "grad_norm": 7.625104631628316, + "learning_rate": 2.712605704979832e-06, + "loss": 0.5869, + "step": 5437 + }, + { + "epoch": 0.22439547742840638, + "grad_norm": 2.4112450634306164, + "learning_rate": 2.712487688297321e-06, + "loss": 0.514, + "step": 5438 + }, + { + "epoch": 0.2244367417677643, + "grad_norm": 3.107907547170841, + "learning_rate": 2.7123696499567457e-06, + "loss": 0.5003, + "step": 5439 + }, + { + "epoch": 0.22447800610712224, + "grad_norm": 5.282426631709483, + "learning_rate": 2.7122515899602168e-06, + "loss": 0.5368, + "step": 5440 + }, + { + "epoch": 0.22451927044648015, + "grad_norm": 3.6007472251104415, + "learning_rate": 
2.712133508309841e-06, + "loss": 0.5453, + "step": 5441 + }, + { + "epoch": 0.22456053478583807, + "grad_norm": 3.0369713848932243, + "learning_rate": 2.712015405007729e-06, + "loss": 0.5262, + "step": 5442 + }, + { + "epoch": 0.224601799125196, + "grad_norm": 5.791791740474824, + "learning_rate": 2.71189728005599e-06, + "loss": 0.6191, + "step": 5443 + }, + { + "epoch": 0.22464306346455393, + "grad_norm": 2.9580788524604413, + "learning_rate": 2.7117791334567333e-06, + "loss": 0.5609, + "step": 5444 + }, + { + "epoch": 0.22468432780391187, + "grad_norm": 2.504286084644343, + "learning_rate": 2.7116609652120703e-06, + "loss": 0.5336, + "step": 5445 + }, + { + "epoch": 0.2247255921432698, + "grad_norm": 2.4560184352543497, + "learning_rate": 2.7115427753241113e-06, + "loss": 0.5067, + "step": 5446 + }, + { + "epoch": 0.2247668564826277, + "grad_norm": 5.273977336863417, + "learning_rate": 2.7114245637949674e-06, + "loss": 0.5448, + "step": 5447 + }, + { + "epoch": 0.22480812082198565, + "grad_norm": 5.1373560629992365, + "learning_rate": 2.7113063306267502e-06, + "loss": 0.496, + "step": 5448 + }, + { + "epoch": 0.22484938516134356, + "grad_norm": 2.9917558660267214, + "learning_rate": 2.711188075821572e-06, + "loss": 0.5317, + "step": 5449 + }, + { + "epoch": 0.2248906495007015, + "grad_norm": 2.051544060084158, + "learning_rate": 2.7110697993815447e-06, + "loss": 0.5215, + "step": 5450 + }, + { + "epoch": 0.22493191384005942, + "grad_norm": 2.6292204105115737, + "learning_rate": 2.710951501308781e-06, + "loss": 0.594, + "step": 5451 + }, + { + "epoch": 0.22497317817941734, + "grad_norm": 3.4353227202730077, + "learning_rate": 2.710833181605394e-06, + "loss": 0.5362, + "step": 5452 + }, + { + "epoch": 0.22501444251877528, + "grad_norm": 5.203454629864616, + "learning_rate": 2.710714840273498e-06, + "loss": 0.5835, + "step": 5453 + }, + { + "epoch": 0.2250557068581332, + "grad_norm": 9.340092657231132, + "learning_rate": 2.7105964773152053e-06, + "loss": 0.5748, + "step": 5454 + }, + { + "epoch": 0.22509697119749114, + "grad_norm": 2.788901388076959, + "learning_rate": 2.7104780927326317e-06, + "loss": 0.5225, + "step": 5455 + }, + { + "epoch": 0.22513823553684906, + "grad_norm": 2.574612657073146, + "learning_rate": 2.7103596865278913e-06, + "loss": 0.564, + "step": 5456 + }, + { + "epoch": 0.22517949987620697, + "grad_norm": 3.4896132975437797, + "learning_rate": 2.7102412587030986e-06, + "loss": 0.5689, + "step": 5457 + }, + { + "epoch": 0.22522076421556492, + "grad_norm": 3.8993522158450733, + "learning_rate": 2.7101228092603696e-06, + "loss": 0.5888, + "step": 5458 + }, + { + "epoch": 0.22526202855492283, + "grad_norm": 3.171544506218599, + "learning_rate": 2.71000433820182e-06, + "loss": 0.552, + "step": 5459 + }, + { + "epoch": 0.22530329289428075, + "grad_norm": 4.106302393883121, + "learning_rate": 2.709885845529566e-06, + "loss": 0.5473, + "step": 5460 + }, + { + "epoch": 0.2253445572336387, + "grad_norm": 3.972555421946089, + "learning_rate": 2.7097673312457243e-06, + "loss": 0.6458, + "step": 5461 + }, + { + "epoch": 0.2253858215729966, + "grad_norm": 5.954828001420881, + "learning_rate": 2.7096487953524114e-06, + "loss": 0.5546, + "step": 5462 + }, + { + "epoch": 0.22542708591235455, + "grad_norm": 4.2107513725560874, + "learning_rate": 2.7095302378517454e-06, + "loss": 0.5547, + "step": 5463 + }, + { + "epoch": 0.22546835025171247, + "grad_norm": 5.895909153980852, + "learning_rate": 2.709411658745843e-06, + "loss": 0.5381, + "step": 5464 + }, + { + "epoch": 
0.22550961459107038, + "grad_norm": 2.6372208585257213, + "learning_rate": 2.709293058036823e-06, + "loss": 0.5156, + "step": 5465 + }, + { + "epoch": 0.22555087893042833, + "grad_norm": 3.6835970018839657, + "learning_rate": 2.7091744357268038e-06, + "loss": 0.604, + "step": 5466 + }, + { + "epoch": 0.22559214326978624, + "grad_norm": 3.9795828855809305, + "learning_rate": 2.7090557918179046e-06, + "loss": 0.5494, + "step": 5467 + }, + { + "epoch": 0.2256334076091442, + "grad_norm": 3.3405268967719968, + "learning_rate": 2.708937126312244e-06, + "loss": 0.5628, + "step": 5468 + }, + { + "epoch": 0.2256746719485021, + "grad_norm": 3.1642023775152626, + "learning_rate": 2.7088184392119423e-06, + "loss": 0.574, + "step": 5469 + }, + { + "epoch": 0.22571593628786002, + "grad_norm": 3.4403887317134254, + "learning_rate": 2.708699730519119e-06, + "loss": 0.5555, + "step": 5470 + }, + { + "epoch": 0.22575720062721796, + "grad_norm": 11.483707367772146, + "learning_rate": 2.7085810002358948e-06, + "loss": 0.497, + "step": 5471 + }, + { + "epoch": 0.22579846496657588, + "grad_norm": 10.098363698783492, + "learning_rate": 2.708462248364391e-06, + "loss": 0.5033, + "step": 5472 + }, + { + "epoch": 0.22583972930593382, + "grad_norm": 6.065171789204813, + "learning_rate": 2.7083434749067284e-06, + "loss": 0.5901, + "step": 5473 + }, + { + "epoch": 0.22588099364529174, + "grad_norm": 3.306845928723906, + "learning_rate": 2.7082246798650278e-06, + "loss": 0.5941, + "step": 5474 + }, + { + "epoch": 0.22592225798464965, + "grad_norm": 5.417565209247478, + "learning_rate": 2.708105863241412e-06, + "loss": 0.5623, + "step": 5475 + }, + { + "epoch": 0.2259635223240076, + "grad_norm": 2.6299812172887704, + "learning_rate": 2.707987025038004e-06, + "loss": 0.5276, + "step": 5476 + }, + { + "epoch": 0.22600478666336551, + "grad_norm": 3.342340624372662, + "learning_rate": 2.7078681652569254e-06, + "loss": 0.5826, + "step": 5477 + }, + { + "epoch": 0.22604605100272346, + "grad_norm": 4.564499163400271, + "learning_rate": 2.707749283900299e-06, + "loss": 0.5411, + "step": 5478 + }, + { + "epoch": 0.22608731534208137, + "grad_norm": 6.886888090255303, + "learning_rate": 2.7076303809702498e-06, + "loss": 0.4871, + "step": 5479 + }, + { + "epoch": 0.2261285796814393, + "grad_norm": 2.2844081032576544, + "learning_rate": 2.7075114564689013e-06, + "loss": 0.5637, + "step": 5480 + }, + { + "epoch": 0.22616984402079723, + "grad_norm": 2.94685435440165, + "learning_rate": 2.707392510398377e-06, + "loss": 0.5866, + "step": 5481 + }, + { + "epoch": 0.22621110836015515, + "grad_norm": 3.028508468027017, + "learning_rate": 2.7072735427608023e-06, + "loss": 0.5152, + "step": 5482 + }, + { + "epoch": 0.2262523726995131, + "grad_norm": 3.462734785298202, + "learning_rate": 2.7071545535583013e-06, + "loss": 0.5857, + "step": 5483 + }, + { + "epoch": 0.226293637038871, + "grad_norm": 4.318611140476542, + "learning_rate": 2.7070355427930008e-06, + "loss": 0.5058, + "step": 5484 + }, + { + "epoch": 0.22633490137822893, + "grad_norm": 3.9994015038187807, + "learning_rate": 2.706916510467026e-06, + "loss": 0.5485, + "step": 5485 + }, + { + "epoch": 0.22637616571758687, + "grad_norm": 3.1791094658814236, + "learning_rate": 2.7067974565825026e-06, + "loss": 0.5058, + "step": 5486 + }, + { + "epoch": 0.22641743005694479, + "grad_norm": 4.051065156394139, + "learning_rate": 2.706678381141558e-06, + "loss": 0.5123, + "step": 5487 + }, + { + "epoch": 0.22645869439630273, + "grad_norm": 3.4618852064396552, + "learning_rate": 
2.7065592841463188e-06, + "loss": 0.5567, + "step": 5488 + }, + { + "epoch": 0.22649995873566064, + "grad_norm": 4.516293355242854, + "learning_rate": 2.706440165598913e-06, + "loss": 0.5806, + "step": 5489 + }, + { + "epoch": 0.22654122307501856, + "grad_norm": 2.6589142501758247, + "learning_rate": 2.7063210255014665e-06, + "loss": 0.5475, + "step": 5490 + }, + { + "epoch": 0.2265824874143765, + "grad_norm": 12.735845737151426, + "learning_rate": 2.7062018638561094e-06, + "loss": 0.5643, + "step": 5491 + }, + { + "epoch": 0.22662375175373442, + "grad_norm": 10.913715688146493, + "learning_rate": 2.70608268066497e-06, + "loss": 0.5374, + "step": 5492 + }, + { + "epoch": 0.22666501609309234, + "grad_norm": 6.649410366493621, + "learning_rate": 2.705963475930176e-06, + "loss": 0.5358, + "step": 5493 + }, + { + "epoch": 0.22670628043245028, + "grad_norm": 4.549731377808013, + "learning_rate": 2.705844249653858e-06, + "loss": 0.5474, + "step": 5494 + }, + { + "epoch": 0.2267475447718082, + "grad_norm": 7.203432541942861, + "learning_rate": 2.705725001838145e-06, + "loss": 0.5239, + "step": 5495 + }, + { + "epoch": 0.22678880911116614, + "grad_norm": 3.1630219903234598, + "learning_rate": 2.7056057324851674e-06, + "loss": 0.571, + "step": 5496 + }, + { + "epoch": 0.22683007345052406, + "grad_norm": 3.925326788683587, + "learning_rate": 2.7054864415970553e-06, + "loss": 0.5506, + "step": 5497 + }, + { + "epoch": 0.22687133778988197, + "grad_norm": 7.462699326640666, + "learning_rate": 2.7053671291759394e-06, + "loss": 0.5562, + "step": 5498 + }, + { + "epoch": 0.22691260212923992, + "grad_norm": 3.0203279861054164, + "learning_rate": 2.705247795223952e-06, + "loss": 0.5257, + "step": 5499 + }, + { + "epoch": 0.22695386646859783, + "grad_norm": 3.350712869185386, + "learning_rate": 2.705128439743223e-06, + "loss": 0.5224, + "step": 5500 + }, + { + "epoch": 0.22699513080795578, + "grad_norm": 2.382886306414392, + "learning_rate": 2.7050090627358857e-06, + "loss": 0.5372, + "step": 5501 + }, + { + "epoch": 0.2270363951473137, + "grad_norm": 2.783501903283959, + "learning_rate": 2.704889664204072e-06, + "loss": 0.5231, + "step": 5502 + }, + { + "epoch": 0.2270776594866716, + "grad_norm": 3.1972387260652275, + "learning_rate": 2.7047702441499146e-06, + "loss": 0.5993, + "step": 5503 + }, + { + "epoch": 0.22711892382602955, + "grad_norm": 5.285412768829862, + "learning_rate": 2.7046508025755468e-06, + "loss": 0.5478, + "step": 5504 + }, + { + "epoch": 0.22716018816538747, + "grad_norm": 2.4305427412568767, + "learning_rate": 2.7045313394831027e-06, + "loss": 0.5913, + "step": 5505 + }, + { + "epoch": 0.2272014525047454, + "grad_norm": 5.161127017360403, + "learning_rate": 2.7044118548747147e-06, + "loss": 0.6371, + "step": 5506 + }, + { + "epoch": 0.22724271684410333, + "grad_norm": 5.919166856284114, + "learning_rate": 2.7042923487525183e-06, + "loss": 0.5536, + "step": 5507 + }, + { + "epoch": 0.22728398118346124, + "grad_norm": 3.5781277916058305, + "learning_rate": 2.704172821118648e-06, + "loss": 0.506, + "step": 5508 + }, + { + "epoch": 0.22732524552281919, + "grad_norm": 3.9950948669623996, + "learning_rate": 2.704053271975239e-06, + "loss": 0.5603, + "step": 5509 + }, + { + "epoch": 0.2273665098621771, + "grad_norm": 9.055589051919823, + "learning_rate": 2.703933701324426e-06, + "loss": 0.5298, + "step": 5510 + }, + { + "epoch": 0.22740777420153505, + "grad_norm": 3.1682781902045054, + "learning_rate": 2.7038141091683458e-06, + "loss": 0.5839, + "step": 5511 + }, + { + "epoch": 
0.22744903854089296, + "grad_norm": 3.57828447139121, + "learning_rate": 2.703694495509134e-06, + "loss": 0.5289, + "step": 5512 + }, + { + "epoch": 0.22749030288025088, + "grad_norm": 2.62202693793993, + "learning_rate": 2.703574860348927e-06, + "loss": 0.5881, + "step": 5513 + }, + { + "epoch": 0.22753156721960882, + "grad_norm": 3.5925281161427334, + "learning_rate": 2.703455203689862e-06, + "loss": 0.529, + "step": 5514 + }, + { + "epoch": 0.22757283155896674, + "grad_norm": 3.62428794011204, + "learning_rate": 2.7033355255340768e-06, + "loss": 0.5847, + "step": 5515 + }, + { + "epoch": 0.22761409589832468, + "grad_norm": 7.106810000606524, + "learning_rate": 2.703215825883709e-06, + "loss": 0.515, + "step": 5516 + }, + { + "epoch": 0.2276553602376826, + "grad_norm": 5.91678163316708, + "learning_rate": 2.703096104740896e-06, + "loss": 0.5804, + "step": 5517 + }, + { + "epoch": 0.2276966245770405, + "grad_norm": 3.8992764827964463, + "learning_rate": 2.702976362107777e-06, + "loss": 0.5917, + "step": 5518 + }, + { + "epoch": 0.22773788891639846, + "grad_norm": 2.8207399672122357, + "learning_rate": 2.702856597986492e-06, + "loss": 0.5299, + "step": 5519 + }, + { + "epoch": 0.22777915325575637, + "grad_norm": 5.517467399053912, + "learning_rate": 2.702736812379178e-06, + "loss": 0.5601, + "step": 5520 + }, + { + "epoch": 0.2278204175951143, + "grad_norm": 3.846693339289755, + "learning_rate": 2.7026170052879755e-06, + "loss": 0.535, + "step": 5521 + }, + { + "epoch": 0.22786168193447223, + "grad_norm": 5.424491867981619, + "learning_rate": 2.702497176715025e-06, + "loss": 0.5118, + "step": 5522 + }, + { + "epoch": 0.22790294627383015, + "grad_norm": 5.08603512154535, + "learning_rate": 2.702377326662467e-06, + "loss": 0.5473, + "step": 5523 + }, + { + "epoch": 0.2279442106131881, + "grad_norm": 4.125131027087063, + "learning_rate": 2.702257455132442e-06, + "loss": 0.5233, + "step": 5524 + }, + { + "epoch": 0.227985474952546, + "grad_norm": 3.182029338945923, + "learning_rate": 2.702137562127091e-06, + "loss": 0.5076, + "step": 5525 + }, + { + "epoch": 0.22802673929190392, + "grad_norm": 4.69967934456974, + "learning_rate": 2.702017647648556e-06, + "loss": 0.519, + "step": 5526 + }, + { + "epoch": 0.22806800363126187, + "grad_norm": 4.065800520598681, + "learning_rate": 2.701897711698979e-06, + "loss": 0.5445, + "step": 5527 + }, + { + "epoch": 0.22810926797061978, + "grad_norm": 5.899092569870941, + "learning_rate": 2.701777754280502e-06, + "loss": 0.562, + "step": 5528 + }, + { + "epoch": 0.22815053230997773, + "grad_norm": 5.73723525086251, + "learning_rate": 2.701657775395268e-06, + "loss": 0.5337, + "step": 5529 + }, + { + "epoch": 0.22819179664933564, + "grad_norm": 3.7869937113702252, + "learning_rate": 2.70153777504542e-06, + "loss": 0.5546, + "step": 5530 + }, + { + "epoch": 0.22823306098869356, + "grad_norm": 4.898451531766742, + "learning_rate": 2.7014177532331014e-06, + "loss": 0.5894, + "step": 5531 + }, + { + "epoch": 0.2282743253280515, + "grad_norm": 3.435566288033464, + "learning_rate": 2.7012977099604565e-06, + "loss": 0.5172, + "step": 5532 + }, + { + "epoch": 0.22831558966740942, + "grad_norm": 5.375070082164947, + "learning_rate": 2.701177645229629e-06, + "loss": 0.5569, + "step": 5533 + }, + { + "epoch": 0.22835685400676736, + "grad_norm": 10.607265532729835, + "learning_rate": 2.7010575590427643e-06, + "loss": 0.5986, + "step": 5534 + }, + { + "epoch": 0.22839811834612528, + "grad_norm": 2.8868279320175563, + "learning_rate": 2.7009374514020068e-06, + "loss": 
0.5545, + "step": 5535 + }, + { + "epoch": 0.2284393826854832, + "grad_norm": 4.077586340749129, + "learning_rate": 2.700817322309502e-06, + "loss": 0.4652, + "step": 5536 + }, + { + "epoch": 0.22848064702484114, + "grad_norm": 7.039286406847648, + "learning_rate": 2.7006971717673964e-06, + "loss": 0.5546, + "step": 5537 + }, + { + "epoch": 0.22852191136419905, + "grad_norm": 4.049271650887437, + "learning_rate": 2.7005769997778347e-06, + "loss": 0.5916, + "step": 5538 + }, + { + "epoch": 0.228563175703557, + "grad_norm": 5.308719982746561, + "learning_rate": 2.7004568063429654e-06, + "loss": 0.5897, + "step": 5539 + }, + { + "epoch": 0.2286044400429149, + "grad_norm": 4.623090187542637, + "learning_rate": 2.700336591464934e-06, + "loss": 0.5934, + "step": 5540 + }, + { + "epoch": 0.22864570438227283, + "grad_norm": 4.273567725914525, + "learning_rate": 2.700216355145888e-06, + "loss": 0.535, + "step": 5541 + }, + { + "epoch": 0.22868696872163077, + "grad_norm": 10.187630536608816, + "learning_rate": 2.7000960973879757e-06, + "loss": 0.5326, + "step": 5542 + }, + { + "epoch": 0.2287282330609887, + "grad_norm": 12.754885370450985, + "learning_rate": 2.6999758181933447e-06, + "loss": 0.5743, + "step": 5543 + }, + { + "epoch": 0.22876949740034663, + "grad_norm": 5.114911341598784, + "learning_rate": 2.699855517564144e-06, + "loss": 0.544, + "step": 5544 + }, + { + "epoch": 0.22881076173970455, + "grad_norm": 4.120048540250468, + "learning_rate": 2.6997351955025222e-06, + "loss": 0.5067, + "step": 5545 + }, + { + "epoch": 0.22885202607906246, + "grad_norm": 3.817269135009382, + "learning_rate": 2.6996148520106286e-06, + "loss": 0.5452, + "step": 5546 + }, + { + "epoch": 0.2288932904184204, + "grad_norm": 6.057356356063641, + "learning_rate": 2.6994944870906124e-06, + "loss": 0.5482, + "step": 5547 + }, + { + "epoch": 0.22893455475777832, + "grad_norm": 3.2260070188163565, + "learning_rate": 2.699374100744624e-06, + "loss": 0.5351, + "step": 5548 + }, + { + "epoch": 0.22897581909713627, + "grad_norm": 4.561682159947677, + "learning_rate": 2.699253692974814e-06, + "loss": 0.5263, + "step": 5549 + }, + { + "epoch": 0.22901708343649418, + "grad_norm": 2.3525519112835047, + "learning_rate": 2.699133263783333e-06, + "loss": 0.5033, + "step": 5550 + }, + { + "epoch": 0.2290583477758521, + "grad_norm": 3.3264548585474687, + "learning_rate": 2.699012813172332e-06, + "loss": 0.5562, + "step": 5551 + }, + { + "epoch": 0.22909961211521004, + "grad_norm": 3.569610959616252, + "learning_rate": 2.6988923411439627e-06, + "loss": 0.5025, + "step": 5552 + }, + { + "epoch": 0.22914087645456796, + "grad_norm": 4.353464198660248, + "learning_rate": 2.698771847700377e-06, + "loss": 0.5447, + "step": 5553 + }, + { + "epoch": 0.22918214079392588, + "grad_norm": 3.4891690816446976, + "learning_rate": 2.698651332843727e-06, + "loss": 0.5637, + "step": 5554 + }, + { + "epoch": 0.22922340513328382, + "grad_norm": 2.417973930643734, + "learning_rate": 2.6985307965761666e-06, + "loss": 0.4758, + "step": 5555 + }, + { + "epoch": 0.22926466947264174, + "grad_norm": 2.734695821512242, + "learning_rate": 2.698410238899847e-06, + "loss": 0.5606, + "step": 5556 + }, + { + "epoch": 0.22930593381199968, + "grad_norm": 3.3760797714239157, + "learning_rate": 2.698289659816923e-06, + "loss": 0.5437, + "step": 5557 + }, + { + "epoch": 0.2293471981513576, + "grad_norm": 5.459302496257632, + "learning_rate": 2.6981690593295474e-06, + "loss": 0.5572, + "step": 5558 + }, + { + "epoch": 0.2293884624907155, + "grad_norm": 5.307834147428554, 
+ "learning_rate": 2.6980484374398757e-06, + "loss": 0.5509, + "step": 5559 + }, + { + "epoch": 0.22942972683007345, + "grad_norm": 8.603010611174009, + "learning_rate": 2.6979277941500615e-06, + "loss": 0.5973, + "step": 5560 + }, + { + "epoch": 0.22947099116943137, + "grad_norm": 4.153673878311409, + "learning_rate": 2.69780712946226e-06, + "loss": 0.5233, + "step": 5561 + }, + { + "epoch": 0.22951225550878931, + "grad_norm": 4.1542975299461204, + "learning_rate": 2.697686443378627e-06, + "loss": 0.5854, + "step": 5562 + }, + { + "epoch": 0.22955351984814723, + "grad_norm": 2.0231355446448362, + "learning_rate": 2.697565735901318e-06, + "loss": 0.5302, + "step": 5563 + }, + { + "epoch": 0.22959478418750515, + "grad_norm": 3.4234259542147822, + "learning_rate": 2.697445007032489e-06, + "loss": 0.5199, + "step": 5564 + }, + { + "epoch": 0.2296360485268631, + "grad_norm": 3.675825740477217, + "learning_rate": 2.6973242567742964e-06, + "loss": 0.4937, + "step": 5565 + }, + { + "epoch": 0.229677312866221, + "grad_norm": 4.788561445054723, + "learning_rate": 2.697203485128898e-06, + "loss": 0.5253, + "step": 5566 + }, + { + "epoch": 0.22971857720557895, + "grad_norm": 2.3587029928870717, + "learning_rate": 2.6970826920984497e-06, + "loss": 0.5136, + "step": 5567 + }, + { + "epoch": 0.22975984154493687, + "grad_norm": 4.896959043487725, + "learning_rate": 2.69696187768511e-06, + "loss": 0.5282, + "step": 5568 + }, + { + "epoch": 0.22980110588429478, + "grad_norm": 9.098743455023921, + "learning_rate": 2.696841041891037e-06, + "loss": 0.5668, + "step": 5569 + }, + { + "epoch": 0.22984237022365273, + "grad_norm": 4.641909570592481, + "learning_rate": 2.6967201847183893e-06, + "loss": 0.5715, + "step": 5570 + }, + { + "epoch": 0.22988363456301064, + "grad_norm": 3.856903240991503, + "learning_rate": 2.696599306169325e-06, + "loss": 0.5313, + "step": 5571 + }, + { + "epoch": 0.22992489890236859, + "grad_norm": 4.312332205650851, + "learning_rate": 2.6964784062460035e-06, + "loss": 0.5901, + "step": 5572 + }, + { + "epoch": 0.2299661632417265, + "grad_norm": 3.587734465292258, + "learning_rate": 2.696357484950585e-06, + "loss": 0.5957, + "step": 5573 + }, + { + "epoch": 0.23000742758108442, + "grad_norm": 3.4822326222131097, + "learning_rate": 2.6962365422852285e-06, + "loss": 0.5391, + "step": 5574 + }, + { + "epoch": 0.23004869192044236, + "grad_norm": 3.2256046185103813, + "learning_rate": 2.696115578252095e-06, + "loss": 0.5152, + "step": 5575 + }, + { + "epoch": 0.23008995625980028, + "grad_norm": 3.556443848759825, + "learning_rate": 2.695994592853345e-06, + "loss": 0.5641, + "step": 5576 + }, + { + "epoch": 0.23013122059915822, + "grad_norm": 5.162597541766662, + "learning_rate": 2.69587358609114e-06, + "loss": 0.6072, + "step": 5577 + }, + { + "epoch": 0.23017248493851614, + "grad_norm": 8.382222836775492, + "learning_rate": 2.695752557967641e-06, + "loss": 0.578, + "step": 5578 + }, + { + "epoch": 0.23021374927787405, + "grad_norm": 3.005566452851104, + "learning_rate": 2.6956315084850094e-06, + "loss": 0.5537, + "step": 5579 + }, + { + "epoch": 0.230255013617232, + "grad_norm": 2.9503316025152846, + "learning_rate": 2.6955104376454085e-06, + "loss": 0.5667, + "step": 5580 + }, + { + "epoch": 0.2302962779565899, + "grad_norm": 10.34994996454267, + "learning_rate": 2.6953893454510004e-06, + "loss": 0.5589, + "step": 5581 + }, + { + "epoch": 0.23033754229594783, + "grad_norm": 2.454585109831602, + "learning_rate": 2.6952682319039484e-06, + "loss": 0.4761, + "step": 5582 + }, + { + "epoch": 
0.23037880663530577, + "grad_norm": 3.7662587480164245, + "learning_rate": 2.6951470970064157e-06, + "loss": 0.6191, + "step": 5583 + }, + { + "epoch": 0.2304200709746637, + "grad_norm": 3.562921225731514, + "learning_rate": 2.695025940760566e-06, + "loss": 0.5595, + "step": 5584 + }, + { + "epoch": 0.23046133531402163, + "grad_norm": 6.36127011521881, + "learning_rate": 2.694904763168563e-06, + "loss": 0.6031, + "step": 5585 + }, + { + "epoch": 0.23050259965337955, + "grad_norm": 2.3590188339051856, + "learning_rate": 2.6947835642325725e-06, + "loss": 0.5615, + "step": 5586 + }, + { + "epoch": 0.23054386399273746, + "grad_norm": 3.6524991685863326, + "learning_rate": 2.6946623439547583e-06, + "loss": 0.627, + "step": 5587 + }, + { + "epoch": 0.2305851283320954, + "grad_norm": 2.8257983244244578, + "learning_rate": 2.694541102337286e-06, + "loss": 0.5428, + "step": 5588 + }, + { + "epoch": 0.23062639267145332, + "grad_norm": 2.581662246278243, + "learning_rate": 2.6944198393823215e-06, + "loss": 0.5713, + "step": 5589 + }, + { + "epoch": 0.23066765701081127, + "grad_norm": 3.7057692231716515, + "learning_rate": 2.6942985550920307e-06, + "loss": 0.557, + "step": 5590 + }, + { + "epoch": 0.23070892135016918, + "grad_norm": 3.8175118534826162, + "learning_rate": 2.69417724946858e-06, + "loss": 0.5434, + "step": 5591 + }, + { + "epoch": 0.2307501856895271, + "grad_norm": 3.861624027602934, + "learning_rate": 2.6940559225141356e-06, + "loss": 0.5797, + "step": 5592 + }, + { + "epoch": 0.23079145002888504, + "grad_norm": 7.506929705981698, + "learning_rate": 2.693934574230866e-06, + "loss": 0.6062, + "step": 5593 + }, + { + "epoch": 0.23083271436824296, + "grad_norm": 4.2457719195974395, + "learning_rate": 2.693813204620938e-06, + "loss": 0.4854, + "step": 5594 + }, + { + "epoch": 0.2308739787076009, + "grad_norm": 3.4672038222739587, + "learning_rate": 2.6936918136865193e-06, + "loss": 0.5549, + "step": 5595 + }, + { + "epoch": 0.23091524304695882, + "grad_norm": 2.8935298872936923, + "learning_rate": 2.6935704014297795e-06, + "loss": 0.5216, + "step": 5596 + }, + { + "epoch": 0.23095650738631673, + "grad_norm": 2.602719033212621, + "learning_rate": 2.6934489678528857e-06, + "loss": 0.5337, + "step": 5597 + }, + { + "epoch": 0.23099777172567468, + "grad_norm": 3.3142390635142362, + "learning_rate": 2.693327512958008e-06, + "loss": 0.5273, + "step": 5598 + }, + { + "epoch": 0.2310390360650326, + "grad_norm": 6.292089692737917, + "learning_rate": 2.6932060367473158e-06, + "loss": 0.5118, + "step": 5599 + }, + { + "epoch": 0.23108030040439054, + "grad_norm": 3.6954146326250847, + "learning_rate": 2.6930845392229793e-06, + "loss": 0.5536, + "step": 5600 + }, + { + "epoch": 0.23112156474374845, + "grad_norm": 3.302441045974613, + "learning_rate": 2.6929630203871677e-06, + "loss": 0.5848, + "step": 5601 + }, + { + "epoch": 0.23116282908310637, + "grad_norm": 3.7418662441796378, + "learning_rate": 2.6928414802420524e-06, + "loss": 0.5687, + "step": 5602 + }, + { + "epoch": 0.2312040934224643, + "grad_norm": 3.490162840462675, + "learning_rate": 2.692719918789804e-06, + "loss": 0.5893, + "step": 5603 + }, + { + "epoch": 0.23124535776182223, + "grad_norm": 4.382116069938932, + "learning_rate": 2.692598336032594e-06, + "loss": 0.5797, + "step": 5604 + }, + { + "epoch": 0.23128662210118017, + "grad_norm": 14.190866621502666, + "learning_rate": 2.692476731972595e-06, + "loss": 0.5727, + "step": 5605 + }, + { + "epoch": 0.2313278864405381, + "grad_norm": 5.615871166718995, + "learning_rate": 
2.6923551066119777e-06, + "loss": 0.5999, + "step": 5606 + }, + { + "epoch": 0.231369150779896, + "grad_norm": 6.670632590173857, + "learning_rate": 2.6922334599529156e-06, + "loss": 0.5252, + "step": 5607 + }, + { + "epoch": 0.23141041511925395, + "grad_norm": 5.756318294148031, + "learning_rate": 2.6921117919975817e-06, + "loss": 0.5696, + "step": 5608 + }, + { + "epoch": 0.23145167945861186, + "grad_norm": 3.6415731447071478, + "learning_rate": 2.6919901027481487e-06, + "loss": 0.5592, + "step": 5609 + }, + { + "epoch": 0.2314929437979698, + "grad_norm": 3.173515911882615, + "learning_rate": 2.691868392206791e-06, + "loss": 0.5749, + "step": 5610 + }, + { + "epoch": 0.23153420813732772, + "grad_norm": 2.6215455438630966, + "learning_rate": 2.691746660375682e-06, + "loss": 0.5305, + "step": 5611 + }, + { + "epoch": 0.23157547247668564, + "grad_norm": 2.2807538234937756, + "learning_rate": 2.6916249072569957e-06, + "loss": 0.4991, + "step": 5612 + }, + { + "epoch": 0.23161673681604358, + "grad_norm": 2.8100723163115413, + "learning_rate": 2.6915031328529083e-06, + "loss": 0.5198, + "step": 5613 + }, + { + "epoch": 0.2316580011554015, + "grad_norm": 3.133897245027761, + "learning_rate": 2.6913813371655942e-06, + "loss": 0.5648, + "step": 5614 + }, + { + "epoch": 0.23169926549475942, + "grad_norm": 4.528935831354502, + "learning_rate": 2.691259520197229e-06, + "loss": 0.5491, + "step": 5615 + }, + { + "epoch": 0.23174052983411736, + "grad_norm": 3.789408686966973, + "learning_rate": 2.6911376819499883e-06, + "loss": 0.523, + "step": 5616 + }, + { + "epoch": 0.23178179417347528, + "grad_norm": 2.7518873437168874, + "learning_rate": 2.6910158224260497e-06, + "loss": 0.5395, + "step": 5617 + }, + { + "epoch": 0.23182305851283322, + "grad_norm": 3.3391065547554644, + "learning_rate": 2.690893941627588e-06, + "loss": 0.6233, + "step": 5618 + }, + { + "epoch": 0.23186432285219113, + "grad_norm": 10.104036364477874, + "learning_rate": 2.690772039556782e-06, + "loss": 0.5189, + "step": 5619 + }, + { + "epoch": 0.23190558719154905, + "grad_norm": 3.6428497544993594, + "learning_rate": 2.690650116215808e-06, + "loss": 0.5853, + "step": 5620 + }, + { + "epoch": 0.231946851530907, + "grad_norm": 2.7091397481794988, + "learning_rate": 2.690528171606845e-06, + "loss": 0.5608, + "step": 5621 + }, + { + "epoch": 0.2319881158702649, + "grad_norm": 2.8911439055646, + "learning_rate": 2.6904062057320703e-06, + "loss": 0.5978, + "step": 5622 + }, + { + "epoch": 0.23202938020962285, + "grad_norm": 4.822952556724231, + "learning_rate": 2.6902842185936626e-06, + "loss": 0.5056, + "step": 5623 + }, + { + "epoch": 0.23207064454898077, + "grad_norm": 3.6757019919409335, + "learning_rate": 2.6901622101938016e-06, + "loss": 0.5406, + "step": 5624 + }, + { + "epoch": 0.23211190888833869, + "grad_norm": 2.9925268389574926, + "learning_rate": 2.6900401805346655e-06, + "loss": 0.5746, + "step": 5625 + }, + { + "epoch": 0.23215317322769663, + "grad_norm": 3.288082484502557, + "learning_rate": 2.6899181296184356e-06, + "loss": 0.5339, + "step": 5626 + }, + { + "epoch": 0.23219443756705455, + "grad_norm": 4.040764448959781, + "learning_rate": 2.6897960574472906e-06, + "loss": 0.5426, + "step": 5627 + }, + { + "epoch": 0.2322357019064125, + "grad_norm": 3.278553051995028, + "learning_rate": 2.689673964023412e-06, + "loss": 0.5726, + "step": 5628 + }, + { + "epoch": 0.2322769662457704, + "grad_norm": 2.7304558238167393, + "learning_rate": 2.68955184934898e-06, + "loss": 0.5379, + "step": 5629 + }, + { + "epoch": 
0.23231823058512832, + "grad_norm": 6.147360042215481, + "learning_rate": 2.6894297134261763e-06, + "loss": 0.5417, + "step": 5630 + }, + { + "epoch": 0.23235949492448627, + "grad_norm": 2.3126256488302834, + "learning_rate": 2.689307556257182e-06, + "loss": 0.5475, + "step": 5631 + }, + { + "epoch": 0.23240075926384418, + "grad_norm": 4.766579506826264, + "learning_rate": 2.6891853778441804e-06, + "loss": 0.5727, + "step": 5632 + }, + { + "epoch": 0.23244202360320212, + "grad_norm": 2.4203447757556638, + "learning_rate": 2.6890631781893524e-06, + "loss": 0.5569, + "step": 5633 + }, + { + "epoch": 0.23248328794256004, + "grad_norm": 2.767346304046856, + "learning_rate": 2.688940957294882e-06, + "loss": 0.5929, + "step": 5634 + }, + { + "epoch": 0.23252455228191796, + "grad_norm": 3.7580057489274714, + "learning_rate": 2.688818715162951e-06, + "loss": 0.5318, + "step": 5635 + }, + { + "epoch": 0.2325658166212759, + "grad_norm": 2.5866585987867508, + "learning_rate": 2.688696451795745e-06, + "loss": 0.5939, + "step": 5636 + }, + { + "epoch": 0.23260708096063382, + "grad_norm": 3.8151108036252253, + "learning_rate": 2.6885741671954456e-06, + "loss": 0.5449, + "step": 5637 + }, + { + "epoch": 0.23264834529999176, + "grad_norm": 10.455420712422185, + "learning_rate": 2.6884518613642385e-06, + "loss": 0.566, + "step": 5638 + }, + { + "epoch": 0.23268960963934968, + "grad_norm": 4.468484454267434, + "learning_rate": 2.688329534304308e-06, + "loss": 0.5057, + "step": 5639 + }, + { + "epoch": 0.2327308739787076, + "grad_norm": 2.9343825612815877, + "learning_rate": 2.6882071860178397e-06, + "loss": 0.5142, + "step": 5640 + }, + { + "epoch": 0.23277213831806554, + "grad_norm": 3.3136181096566943, + "learning_rate": 2.6880848165070184e-06, + "loss": 0.5558, + "step": 5641 + }, + { + "epoch": 0.23281340265742345, + "grad_norm": 5.98730531338448, + "learning_rate": 2.68796242577403e-06, + "loss": 0.5546, + "step": 5642 + }, + { + "epoch": 0.2328546669967814, + "grad_norm": 8.584994959329093, + "learning_rate": 2.687840013821061e-06, + "loss": 0.484, + "step": 5643 + }, + { + "epoch": 0.2328959313361393, + "grad_norm": 2.8323735159762586, + "learning_rate": 2.6877175806502976e-06, + "loss": 0.6079, + "step": 5644 + }, + { + "epoch": 0.23293719567549723, + "grad_norm": 3.661795938837072, + "learning_rate": 2.6875951262639272e-06, + "loss": 0.5493, + "step": 5645 + }, + { + "epoch": 0.23297846001485517, + "grad_norm": 19.448298600591823, + "learning_rate": 2.687472650664137e-06, + "loss": 0.5448, + "step": 5646 + }, + { + "epoch": 0.2330197243542131, + "grad_norm": 9.552527085500035, + "learning_rate": 2.6873501538531144e-06, + "loss": 0.5992, + "step": 5647 + }, + { + "epoch": 0.233060988693571, + "grad_norm": 3.3835374953916513, + "learning_rate": 2.687227635833048e-06, + "loss": 0.6079, + "step": 5648 + }, + { + "epoch": 0.23310225303292895, + "grad_norm": 5.015121430730125, + "learning_rate": 2.687105096606126e-06, + "loss": 0.5852, + "step": 5649 + }, + { + "epoch": 0.23314351737228686, + "grad_norm": 2.9010928809356162, + "learning_rate": 2.6869825361745375e-06, + "loss": 0.5383, + "step": 5650 + }, + { + "epoch": 0.2331847817116448, + "grad_norm": 3.30192452740862, + "learning_rate": 2.6868599545404712e-06, + "loss": 0.5909, + "step": 5651 + }, + { + "epoch": 0.23322604605100272, + "grad_norm": 3.4264819795051906, + "learning_rate": 2.6867373517061173e-06, + "loss": 0.587, + "step": 5652 + }, + { + "epoch": 0.23326731039036064, + "grad_norm": 5.54320661183813, + "learning_rate": 
2.686614727673665e-06, + "loss": 0.5154, + "step": 5653 + }, + { + "epoch": 0.23330857472971858, + "grad_norm": 3.4953846777344832, + "learning_rate": 2.6864920824453063e-06, + "loss": 0.5313, + "step": 5654 + }, + { + "epoch": 0.2333498390690765, + "grad_norm": 12.650927359460226, + "learning_rate": 2.6863694160232297e-06, + "loss": 0.5909, + "step": 5655 + }, + { + "epoch": 0.23339110340843444, + "grad_norm": 5.876825175881934, + "learning_rate": 2.6862467284096276e-06, + "loss": 0.573, + "step": 5656 + }, + { + "epoch": 0.23343236774779236, + "grad_norm": 4.570211199595891, + "learning_rate": 2.6861240196066924e-06, + "loss": 0.5442, + "step": 5657 + }, + { + "epoch": 0.23347363208715027, + "grad_norm": 5.09302767958035, + "learning_rate": 2.6860012896166143e-06, + "loss": 0.5322, + "step": 5658 + }, + { + "epoch": 0.23351489642650822, + "grad_norm": 3.543313025540964, + "learning_rate": 2.685878538441586e-06, + "loss": 0.5684, + "step": 5659 + }, + { + "epoch": 0.23355616076586613, + "grad_norm": 24.864920352726347, + "learning_rate": 2.685755766083801e-06, + "loss": 0.5416, + "step": 5660 + }, + { + "epoch": 0.23359742510522408, + "grad_norm": 3.5454249431764717, + "learning_rate": 2.685632972545451e-06, + "loss": 0.5571, + "step": 5661 + }, + { + "epoch": 0.233638689444582, + "grad_norm": 3.7166900954835604, + "learning_rate": 2.68551015782873e-06, + "loss": 0.5447, + "step": 5662 + }, + { + "epoch": 0.2336799537839399, + "grad_norm": 2.494265601897946, + "learning_rate": 2.6853873219358327e-06, + "loss": 0.5534, + "step": 5663 + }, + { + "epoch": 0.23372121812329785, + "grad_norm": 6.1487157207249465, + "learning_rate": 2.6852644648689522e-06, + "loss": 0.5327, + "step": 5664 + }, + { + "epoch": 0.23376248246265577, + "grad_norm": 3.47501740588061, + "learning_rate": 2.6851415866302834e-06, + "loss": 0.5476, + "step": 5665 + }, + { + "epoch": 0.2338037468020137, + "grad_norm": 4.36061777768523, + "learning_rate": 2.6850186872220207e-06, + "loss": 0.569, + "step": 5666 + }, + { + "epoch": 0.23384501114137163, + "grad_norm": 4.202932098286679, + "learning_rate": 2.68489576664636e-06, + "loss": 0.5592, + "step": 5667 + }, + { + "epoch": 0.23388627548072954, + "grad_norm": 4.159867391985486, + "learning_rate": 2.6847728249054966e-06, + "loss": 0.5655, + "step": 5668 + }, + { + "epoch": 0.2339275398200875, + "grad_norm": 3.0011594992583492, + "learning_rate": 2.684649862001627e-06, + "loss": 0.5452, + "step": 5669 + }, + { + "epoch": 0.2339688041594454, + "grad_norm": 4.973751725362142, + "learning_rate": 2.684526877936947e-06, + "loss": 0.608, + "step": 5670 + }, + { + "epoch": 0.23401006849880335, + "grad_norm": 4.784286954173611, + "learning_rate": 2.6844038727136538e-06, + "loss": 0.5943, + "step": 5671 + }, + { + "epoch": 0.23405133283816126, + "grad_norm": 4.245675306447393, + "learning_rate": 2.6842808463339445e-06, + "loss": 0.5786, + "step": 5672 + }, + { + "epoch": 0.23409259717751918, + "grad_norm": 2.8495625825764668, + "learning_rate": 2.684157798800017e-06, + "loss": 0.5621, + "step": 5673 + }, + { + "epoch": 0.23413386151687712, + "grad_norm": 3.979725235677735, + "learning_rate": 2.6840347301140687e-06, + "loss": 0.5151, + "step": 5674 + }, + { + "epoch": 0.23417512585623504, + "grad_norm": 6.525599308803063, + "learning_rate": 2.6839116402782983e-06, + "loss": 0.5274, + "step": 5675 + }, + { + "epoch": 0.23421639019559296, + "grad_norm": 2.582646278783992, + "learning_rate": 2.6837885292949037e-06, + "loss": 0.5205, + "step": 5676 + }, + { + "epoch": 0.2342576545349509, 
+ "grad_norm": 2.0442382184462122, + "learning_rate": 2.683665397166085e-06, + "loss": 0.5123, + "step": 5677 + }, + { + "epoch": 0.23429891887430881, + "grad_norm": 4.742799429656837, + "learning_rate": 2.683542243894041e-06, + "loss": 0.46, + "step": 5678 + }, + { + "epoch": 0.23434018321366676, + "grad_norm": 3.8933899588459973, + "learning_rate": 2.683419069480972e-06, + "loss": 0.5616, + "step": 5679 + }, + { + "epoch": 0.23438144755302467, + "grad_norm": 3.3714431668866176, + "learning_rate": 2.683295873929078e-06, + "loss": 0.5572, + "step": 5680 + }, + { + "epoch": 0.2344227118923826, + "grad_norm": 2.514297004800758, + "learning_rate": 2.6831726572405597e-06, + "loss": 0.5062, + "step": 5681 + }, + { + "epoch": 0.23446397623174053, + "grad_norm": 5.547433106436054, + "learning_rate": 2.6830494194176175e-06, + "loss": 0.607, + "step": 5682 + }, + { + "epoch": 0.23450524057109845, + "grad_norm": 9.847492866114912, + "learning_rate": 2.6829261604624535e-06, + "loss": 0.6049, + "step": 5683 + }, + { + "epoch": 0.2345465049104564, + "grad_norm": 3.2055233472164715, + "learning_rate": 2.6828028803772683e-06, + "loss": 0.605, + "step": 5684 + }, + { + "epoch": 0.2345877692498143, + "grad_norm": 3.117453025670712, + "learning_rate": 2.6826795791642654e-06, + "loss": 0.5694, + "step": 5685 + }, + { + "epoch": 0.23462903358917223, + "grad_norm": 4.1568686499506775, + "learning_rate": 2.6825562568256463e-06, + "loss": 0.5268, + "step": 5686 + }, + { + "epoch": 0.23467029792853017, + "grad_norm": 10.645542002762951, + "learning_rate": 2.682432913363614e-06, + "loss": 0.5416, + "step": 5687 + }, + { + "epoch": 0.23471156226788809, + "grad_norm": 2.007647890168512, + "learning_rate": 2.6823095487803728e-06, + "loss": 0.5721, + "step": 5688 + }, + { + "epoch": 0.23475282660724603, + "grad_norm": 2.8144458081480175, + "learning_rate": 2.6821861630781246e-06, + "loss": 0.5425, + "step": 5689 + }, + { + "epoch": 0.23479409094660394, + "grad_norm": 5.2300046488150285, + "learning_rate": 2.6820627562590745e-06, + "loss": 0.5387, + "step": 5690 + }, + { + "epoch": 0.23483535528596186, + "grad_norm": 5.7684816449869825, + "learning_rate": 2.6819393283254263e-06, + "loss": 0.6141, + "step": 5691 + }, + { + "epoch": 0.2348766196253198, + "grad_norm": 2.459002568624837, + "learning_rate": 2.681815879279385e-06, + "loss": 0.5359, + "step": 5692 + }, + { + "epoch": 0.23491788396467772, + "grad_norm": 6.264141298531314, + "learning_rate": 2.681692409123155e-06, + "loss": 0.5581, + "step": 5693 + }, + { + "epoch": 0.23495914830403566, + "grad_norm": 4.414741358102211, + "learning_rate": 2.6815689178589436e-06, + "loss": 0.5088, + "step": 5694 + }, + { + "epoch": 0.23500041264339358, + "grad_norm": 4.116814527918655, + "learning_rate": 2.6814454054889546e-06, + "loss": 0.5897, + "step": 5695 + }, + { + "epoch": 0.2350416769827515, + "grad_norm": 2.6722253292050517, + "learning_rate": 2.681321872015396e-06, + "loss": 0.5635, + "step": 5696 + }, + { + "epoch": 0.23508294132210944, + "grad_norm": 3.068324504117248, + "learning_rate": 2.6811983174404728e-06, + "loss": 0.5395, + "step": 5697 + }, + { + "epoch": 0.23512420566146736, + "grad_norm": 2.5977757726772097, + "learning_rate": 2.6810747417663927e-06, + "loss": 0.5263, + "step": 5698 + }, + { + "epoch": 0.2351654700008253, + "grad_norm": 3.7845984364769425, + "learning_rate": 2.6809511449953633e-06, + "loss": 0.5794, + "step": 5699 + }, + { + "epoch": 0.23520673434018322, + "grad_norm": 3.312899166076857, + "learning_rate": 2.6808275271295925e-06, + "loss": 
0.5712, + "step": 5700 + }, + { + "epoch": 0.23524799867954113, + "grad_norm": 7.318746163334867, + "learning_rate": 2.6807038881712876e-06, + "loss": 0.5235, + "step": 5701 + }, + { + "epoch": 0.23528926301889908, + "grad_norm": 3.631519377375058, + "learning_rate": 2.680580228122658e-06, + "loss": 0.546, + "step": 5702 + }, + { + "epoch": 0.235330527358257, + "grad_norm": 6.252862573109348, + "learning_rate": 2.680456546985912e-06, + "loss": 0.5248, + "step": 5703 + }, + { + "epoch": 0.23537179169761493, + "grad_norm": 11.679784321374498, + "learning_rate": 2.680332844763259e-06, + "loss": 0.553, + "step": 5704 + }, + { + "epoch": 0.23541305603697285, + "grad_norm": 3.62784473503553, + "learning_rate": 2.6802091214569083e-06, + "loss": 0.5433, + "step": 5705 + }, + { + "epoch": 0.23545432037633077, + "grad_norm": 3.131910974035438, + "learning_rate": 2.680085377069071e-06, + "loss": 0.548, + "step": 5706 + }, + { + "epoch": 0.2354955847156887, + "grad_norm": 4.870209283803462, + "learning_rate": 2.679961611601956e-06, + "loss": 0.5226, + "step": 5707 + }, + { + "epoch": 0.23553684905504663, + "grad_norm": 3.639901381667257, + "learning_rate": 2.6798378250577753e-06, + "loss": 0.5739, + "step": 5708 + }, + { + "epoch": 0.23557811339440454, + "grad_norm": 4.109282359352202, + "learning_rate": 2.6797140174387396e-06, + "loss": 0.5328, + "step": 5709 + }, + { + "epoch": 0.2356193777337625, + "grad_norm": 4.59230433399212, + "learning_rate": 2.67959018874706e-06, + "loss": 0.5427, + "step": 5710 + }, + { + "epoch": 0.2356606420731204, + "grad_norm": 3.395730628602293, + "learning_rate": 2.6794663389849485e-06, + "loss": 0.4967, + "step": 5711 + }, + { + "epoch": 0.23570190641247835, + "grad_norm": 4.615245112396578, + "learning_rate": 2.6793424681546185e-06, + "loss": 0.5595, + "step": 5712 + }, + { + "epoch": 0.23574317075183626, + "grad_norm": 9.052702982116589, + "learning_rate": 2.679218576258281e-06, + "loss": 0.496, + "step": 5713 + }, + { + "epoch": 0.23578443509119418, + "grad_norm": 2.996700014740062, + "learning_rate": 2.6790946632981502e-06, + "loss": 0.6066, + "step": 5714 + }, + { + "epoch": 0.23582569943055212, + "grad_norm": 2.876571085309082, + "learning_rate": 2.678970729276439e-06, + "loss": 0.4584, + "step": 5715 + }, + { + "epoch": 0.23586696376991004, + "grad_norm": 1.966952939630078, + "learning_rate": 2.678846774195361e-06, + "loss": 0.5258, + "step": 5716 + }, + { + "epoch": 0.23590822810926798, + "grad_norm": 3.259545646667538, + "learning_rate": 2.678722798057131e-06, + "loss": 0.5593, + "step": 5717 + }, + { + "epoch": 0.2359494924486259, + "grad_norm": 3.8763114259132814, + "learning_rate": 2.6785988008639625e-06, + "loss": 0.5949, + "step": 5718 + }, + { + "epoch": 0.2359907567879838, + "grad_norm": 2.658581226832864, + "learning_rate": 2.6784747826180715e-06, + "loss": 0.5244, + "step": 5719 + }, + { + "epoch": 0.23603202112734176, + "grad_norm": 6.710662091729004, + "learning_rate": 2.678350743321673e-06, + "loss": 0.5467, + "step": 5720 + }, + { + "epoch": 0.23607328546669967, + "grad_norm": 9.835055925452833, + "learning_rate": 2.6782266829769823e-06, + "loss": 0.569, + "step": 5721 + }, + { + "epoch": 0.23611454980605762, + "grad_norm": 2.909236196160744, + "learning_rate": 2.678102601586215e-06, + "loss": 0.572, + "step": 5722 + }, + { + "epoch": 0.23615581414541553, + "grad_norm": 7.4856052404662154, + "learning_rate": 2.6779784991515885e-06, + "loss": 0.5792, + "step": 5723 + }, + { + "epoch": 0.23619707848477345, + "grad_norm": 4.304155910956405, + 
"learning_rate": 2.677854375675319e-06, + "loss": 0.5456, + "step": 5724 + }, + { + "epoch": 0.2362383428241314, + "grad_norm": 3.7468384375628676, + "learning_rate": 2.677730231159624e-06, + "loss": 0.5706, + "step": 5725 + }, + { + "epoch": 0.2362796071634893, + "grad_norm": 3.205883680882137, + "learning_rate": 2.6776060656067207e-06, + "loss": 0.5657, + "step": 5726 + }, + { + "epoch": 0.23632087150284725, + "grad_norm": 2.626704042343657, + "learning_rate": 2.6774818790188272e-06, + "loss": 0.5664, + "step": 5727 + }, + { + "epoch": 0.23636213584220517, + "grad_norm": 4.738114568092054, + "learning_rate": 2.6773576713981617e-06, + "loss": 0.5522, + "step": 5728 + }, + { + "epoch": 0.23640340018156308, + "grad_norm": 5.4218294911029, + "learning_rate": 2.6772334427469432e-06, + "loss": 0.5664, + "step": 5729 + }, + { + "epoch": 0.23644466452092103, + "grad_norm": 14.989404820194943, + "learning_rate": 2.67710919306739e-06, + "loss": 0.542, + "step": 5730 + }, + { + "epoch": 0.23648592886027894, + "grad_norm": 3.677783630663191, + "learning_rate": 2.6769849223617217e-06, + "loss": 0.5229, + "step": 5731 + }, + { + "epoch": 0.2365271931996369, + "grad_norm": 3.0850881689683556, + "learning_rate": 2.676860630632159e-06, + "loss": 0.5163, + "step": 5732 + }, + { + "epoch": 0.2365684575389948, + "grad_norm": 4.063812306244201, + "learning_rate": 2.676736317880921e-06, + "loss": 0.5818, + "step": 5733 + }, + { + "epoch": 0.23660972187835272, + "grad_norm": 3.473194770307248, + "learning_rate": 2.676611984110229e-06, + "loss": 0.6399, + "step": 5734 + }, + { + "epoch": 0.23665098621771066, + "grad_norm": 4.917577729678903, + "learning_rate": 2.676487629322303e-06, + "loss": 0.4854, + "step": 5735 + }, + { + "epoch": 0.23669225055706858, + "grad_norm": 3.721946874305899, + "learning_rate": 2.6763632535193654e-06, + "loss": 0.5825, + "step": 5736 + }, + { + "epoch": 0.2367335148964265, + "grad_norm": 2.987038867768998, + "learning_rate": 2.676238856703637e-06, + "loss": 0.5887, + "step": 5737 + }, + { + "epoch": 0.23677477923578444, + "grad_norm": 5.027738896162872, + "learning_rate": 2.6761144388773395e-06, + "loss": 0.5338, + "step": 5738 + }, + { + "epoch": 0.23681604357514235, + "grad_norm": 2.783561431230871, + "learning_rate": 2.675990000042697e-06, + "loss": 0.5267, + "step": 5739 + }, + { + "epoch": 0.2368573079145003, + "grad_norm": 3.291559362630088, + "learning_rate": 2.6758655402019305e-06, + "loss": 0.5051, + "step": 5740 + }, + { + "epoch": 0.23689857225385821, + "grad_norm": 3.377941569402298, + "learning_rate": 2.675741059357264e-06, + "loss": 0.4999, + "step": 5741 + }, + { + "epoch": 0.23693983659321613, + "grad_norm": 27.342497877416722, + "learning_rate": 2.675616557510921e-06, + "loss": 0.5426, + "step": 5742 + }, + { + "epoch": 0.23698110093257407, + "grad_norm": 3.9833296786317143, + "learning_rate": 2.6754920346651256e-06, + "loss": 0.5264, + "step": 5743 + }, + { + "epoch": 0.237022365271932, + "grad_norm": 9.476658648117036, + "learning_rate": 2.6753674908221017e-06, + "loss": 0.5251, + "step": 5744 + }, + { + "epoch": 0.23706362961128993, + "grad_norm": 2.8431934332996303, + "learning_rate": 2.675242925984074e-06, + "loss": 0.5917, + "step": 5745 + }, + { + "epoch": 0.23710489395064785, + "grad_norm": 3.924714198850453, + "learning_rate": 2.6751183401532673e-06, + "loss": 0.5501, + "step": 5746 + }, + { + "epoch": 0.23714615829000577, + "grad_norm": 4.677335691451728, + "learning_rate": 2.674993733331908e-06, + "loss": 0.4847, + "step": 5747 + }, + { + "epoch": 
0.2371874226293637, + "grad_norm": 3.6367664074108346, + "learning_rate": 2.6748691055222204e-06, + "loss": 0.6215, + "step": 5748 + }, + { + "epoch": 0.23722868696872162, + "grad_norm": 2.4965883164660965, + "learning_rate": 2.6747444567264323e-06, + "loss": 0.5152, + "step": 5749 + }, + { + "epoch": 0.23726995130807957, + "grad_norm": 2.6470246041643235, + "learning_rate": 2.674619786946769e-06, + "loss": 0.4832, + "step": 5750 + }, + { + "epoch": 0.23731121564743748, + "grad_norm": 2.46792999245637, + "learning_rate": 2.6744950961854583e-06, + "loss": 0.5716, + "step": 5751 + }, + { + "epoch": 0.2373524799867954, + "grad_norm": 4.686859520534411, + "learning_rate": 2.674370384444727e-06, + "loss": 0.5433, + "step": 5752 + }, + { + "epoch": 0.23739374432615334, + "grad_norm": 2.859689689508159, + "learning_rate": 2.6742456517268024e-06, + "loss": 0.5412, + "step": 5753 + }, + { + "epoch": 0.23743500866551126, + "grad_norm": 49.56069884850471, + "learning_rate": 2.6741208980339132e-06, + "loss": 0.5136, + "step": 5754 + }, + { + "epoch": 0.2374762730048692, + "grad_norm": 3.848557271047912, + "learning_rate": 2.6739961233682876e-06, + "loss": 0.6176, + "step": 5755 + }, + { + "epoch": 0.23751753734422712, + "grad_norm": 2.671467731207758, + "learning_rate": 2.673871327732154e-06, + "loss": 0.5448, + "step": 5756 + }, + { + "epoch": 0.23755880168358504, + "grad_norm": 4.512012014070775, + "learning_rate": 2.673746511127742e-06, + "loss": 0.5695, + "step": 5757 + }, + { + "epoch": 0.23760006602294298, + "grad_norm": 3.999517196100956, + "learning_rate": 2.6736216735572815e-06, + "loss": 0.4908, + "step": 5758 + }, + { + "epoch": 0.2376413303623009, + "grad_norm": 2.733667333485494, + "learning_rate": 2.6734968150230015e-06, + "loss": 0.5483, + "step": 5759 + }, + { + "epoch": 0.23768259470165884, + "grad_norm": 4.564997099807528, + "learning_rate": 2.6733719355271332e-06, + "loss": 0.4954, + "step": 5760 + }, + { + "epoch": 0.23772385904101676, + "grad_norm": 5.306403385061438, + "learning_rate": 2.673247035071906e-06, + "loss": 0.5699, + "step": 5761 + }, + { + "epoch": 0.23776512338037467, + "grad_norm": 2.792893920367877, + "learning_rate": 2.6731221136595523e-06, + "loss": 0.542, + "step": 5762 + }, + { + "epoch": 0.23780638771973261, + "grad_norm": 6.0009519912902585, + "learning_rate": 2.672997171292303e-06, + "loss": 0.565, + "step": 5763 + }, + { + "epoch": 0.23784765205909053, + "grad_norm": 3.8519597583338125, + "learning_rate": 2.67287220797239e-06, + "loss": 0.5263, + "step": 5764 + }, + { + "epoch": 0.23788891639844847, + "grad_norm": 2.957597892515851, + "learning_rate": 2.6727472237020448e-06, + "loss": 0.5348, + "step": 5765 + }, + { + "epoch": 0.2379301807378064, + "grad_norm": 5.562486608384008, + "learning_rate": 2.6726222184835003e-06, + "loss": 0.5735, + "step": 5766 + }, + { + "epoch": 0.2379714450771643, + "grad_norm": 4.072374595798148, + "learning_rate": 2.6724971923189903e-06, + "loss": 0.5624, + "step": 5767 + }, + { + "epoch": 0.23801270941652225, + "grad_norm": 5.373168586168264, + "learning_rate": 2.6723721452107466e-06, + "loss": 0.5338, + "step": 5768 + }, + { + "epoch": 0.23805397375588017, + "grad_norm": 7.712625422509265, + "learning_rate": 2.672247077161004e-06, + "loss": 0.5466, + "step": 5769 + }, + { + "epoch": 0.23809523809523808, + "grad_norm": 2.6509035680526023, + "learning_rate": 2.672121988171996e-06, + "loss": 0.6134, + "step": 5770 + }, + { + "epoch": 0.23813650243459603, + "grad_norm": 3.0483272476993983, + "learning_rate": 
2.671996878245957e-06, + "loss": 0.5299, + "step": 5771 + }, + { + "epoch": 0.23817776677395394, + "grad_norm": 4.142637544667749, + "learning_rate": 2.671871747385122e-06, + "loss": 0.5591, + "step": 5772 + }, + { + "epoch": 0.23821903111331189, + "grad_norm": 2.771661419763454, + "learning_rate": 2.6717465955917257e-06, + "loss": 0.5593, + "step": 5773 + }, + { + "epoch": 0.2382602954526698, + "grad_norm": 3.798131969835768, + "learning_rate": 2.6716214228680037e-06, + "loss": 0.494, + "step": 5774 + }, + { + "epoch": 0.23830155979202772, + "grad_norm": 3.4659373913660727, + "learning_rate": 2.6714962292161932e-06, + "loss": 0.5777, + "step": 5775 + }, + { + "epoch": 0.23834282413138566, + "grad_norm": 2.9840664581540706, + "learning_rate": 2.6713710146385286e-06, + "loss": 0.4874, + "step": 5776 + }, + { + "epoch": 0.23838408847074358, + "grad_norm": 6.439467801859424, + "learning_rate": 2.6712457791372474e-06, + "loss": 0.5695, + "step": 5777 + }, + { + "epoch": 0.23842535281010152, + "grad_norm": 5.431172057026441, + "learning_rate": 2.671120522714587e-06, + "loss": 0.5303, + "step": 5778 + }, + { + "epoch": 0.23846661714945944, + "grad_norm": 2.4195937302324673, + "learning_rate": 2.670995245372784e-06, + "loss": 0.5663, + "step": 5779 + }, + { + "epoch": 0.23850788148881735, + "grad_norm": 4.544639543806414, + "learning_rate": 2.6708699471140763e-06, + "loss": 0.5475, + "step": 5780 + }, + { + "epoch": 0.2385491458281753, + "grad_norm": 15.379998148399645, + "learning_rate": 2.670744627940703e-06, + "loss": 0.6108, + "step": 5781 + }, + { + "epoch": 0.2385904101675332, + "grad_norm": 5.47776669747441, + "learning_rate": 2.670619287854901e-06, + "loss": 0.5382, + "step": 5782 + }, + { + "epoch": 0.23863167450689116, + "grad_norm": 2.9547154868325425, + "learning_rate": 2.670493926858911e-06, + "loss": 0.5346, + "step": 5783 + }, + { + "epoch": 0.23867293884624907, + "grad_norm": 2.717473687809385, + "learning_rate": 2.670368544954971e-06, + "loss": 0.5347, + "step": 5784 + }, + { + "epoch": 0.238714203185607, + "grad_norm": 4.478957877553497, + "learning_rate": 2.6702431421453207e-06, + "loss": 0.566, + "step": 5785 + }, + { + "epoch": 0.23875546752496493, + "grad_norm": 2.4456672252961544, + "learning_rate": 2.6701177184322004e-06, + "loss": 0.5233, + "step": 5786 + }, + { + "epoch": 0.23879673186432285, + "grad_norm": 4.66374508413338, + "learning_rate": 2.6699922738178506e-06, + "loss": 0.59, + "step": 5787 + }, + { + "epoch": 0.2388379962036808, + "grad_norm": 2.2389378370713056, + "learning_rate": 2.6698668083045125e-06, + "loss": 0.5896, + "step": 5788 + }, + { + "epoch": 0.2388792605430387, + "grad_norm": 3.294816917934533, + "learning_rate": 2.6697413218944256e-06, + "loss": 0.5498, + "step": 5789 + }, + { + "epoch": 0.23892052488239662, + "grad_norm": 6.886850767700792, + "learning_rate": 2.669615814589833e-06, + "loss": 0.5794, + "step": 5790 + }, + { + "epoch": 0.23896178922175457, + "grad_norm": 4.088449410105349, + "learning_rate": 2.6694902863929756e-06, + "loss": 0.5262, + "step": 5791 + }, + { + "epoch": 0.23900305356111248, + "grad_norm": 4.303590289928025, + "learning_rate": 2.669364737306097e-06, + "loss": 0.5519, + "step": 5792 + }, + { + "epoch": 0.23904431790047043, + "grad_norm": 8.782851532311025, + "learning_rate": 2.669239167331438e-06, + "loss": 0.5669, + "step": 5793 + }, + { + "epoch": 0.23908558223982834, + "grad_norm": 3.6617476866609304, + "learning_rate": 2.669113576471243e-06, + "loss": 0.557, + "step": 5794 + }, + { + "epoch": 0.23912684657918626, + 
"grad_norm": 2.407950663884276, + "learning_rate": 2.6689879647277543e-06, + "loss": 0.5314, + "step": 5795 + }, + { + "epoch": 0.2391681109185442, + "grad_norm": 6.796377581273207, + "learning_rate": 2.668862332103217e-06, + "loss": 0.4924, + "step": 5796 + }, + { + "epoch": 0.23920937525790212, + "grad_norm": 6.64169229835868, + "learning_rate": 2.668736678599874e-06, + "loss": 0.538, + "step": 5797 + }, + { + "epoch": 0.23925063959726003, + "grad_norm": 3.365502546096703, + "learning_rate": 2.6686110042199696e-06, + "loss": 0.5559, + "step": 5798 + }, + { + "epoch": 0.23929190393661798, + "grad_norm": 2.975027257886917, + "learning_rate": 2.66848530896575e-06, + "loss": 0.561, + "step": 5799 + }, + { + "epoch": 0.2393331682759759, + "grad_norm": 2.565374826586802, + "learning_rate": 2.6683595928394593e-06, + "loss": 0.5557, + "step": 5800 + }, + { + "epoch": 0.23937443261533384, + "grad_norm": 5.082490607407376, + "learning_rate": 2.668233855843344e-06, + "loss": 0.5881, + "step": 5801 + }, + { + "epoch": 0.23941569695469175, + "grad_norm": 2.336332487914001, + "learning_rate": 2.6681080979796486e-06, + "loss": 0.5343, + "step": 5802 + }, + { + "epoch": 0.23945696129404967, + "grad_norm": 3.0789102452778425, + "learning_rate": 2.6679823192506214e-06, + "loss": 0.5428, + "step": 5803 + }, + { + "epoch": 0.2394982256334076, + "grad_norm": 13.11449546890917, + "learning_rate": 2.667856519658508e-06, + "loss": 0.5883, + "step": 5804 + }, + { + "epoch": 0.23953948997276553, + "grad_norm": 3.8055182289754454, + "learning_rate": 2.667730699205555e-06, + "loss": 0.5422, + "step": 5805 + }, + { + "epoch": 0.23958075431212347, + "grad_norm": 4.632824656244654, + "learning_rate": 2.667604857894011e-06, + "loss": 0.5349, + "step": 5806 + }, + { + "epoch": 0.2396220186514814, + "grad_norm": 3.1989333580659918, + "learning_rate": 2.6674789957261232e-06, + "loss": 0.5435, + "step": 5807 + }, + { + "epoch": 0.2396632829908393, + "grad_norm": 2.4536005498898312, + "learning_rate": 2.66735311270414e-06, + "loss": 0.6034, + "step": 5808 + }, + { + "epoch": 0.23970454733019725, + "grad_norm": 2.9674523671855306, + "learning_rate": 2.6672272088303096e-06, + "loss": 0.5244, + "step": 5809 + }, + { + "epoch": 0.23974581166955516, + "grad_norm": 7.849188418287484, + "learning_rate": 2.6671012841068814e-06, + "loss": 0.5793, + "step": 5810 + }, + { + "epoch": 0.2397870760089131, + "grad_norm": 3.0766813553153285, + "learning_rate": 2.666975338536105e-06, + "loss": 0.5786, + "step": 5811 + }, + { + "epoch": 0.23982834034827102, + "grad_norm": 4.135932840371246, + "learning_rate": 2.6668493721202295e-06, + "loss": 0.5297, + "step": 5812 + }, + { + "epoch": 0.23986960468762894, + "grad_norm": 4.341486749625472, + "learning_rate": 2.6667233848615053e-06, + "loss": 0.5513, + "step": 5813 + }, + { + "epoch": 0.23991086902698688, + "grad_norm": 6.385951544863683, + "learning_rate": 2.666597376762183e-06, + "loss": 0.495, + "step": 5814 + }, + { + "epoch": 0.2399521333663448, + "grad_norm": 4.871054498214868, + "learning_rate": 2.6664713478245126e-06, + "loss": 0.527, + "step": 5815 + }, + { + "epoch": 0.23999339770570274, + "grad_norm": 45.20771005559675, + "learning_rate": 2.6663452980507466e-06, + "loss": 0.5848, + "step": 5816 + }, + { + "epoch": 0.24003466204506066, + "grad_norm": 2.6953901223715566, + "learning_rate": 2.666219227443135e-06, + "loss": 0.5105, + "step": 5817 + }, + { + "epoch": 0.24007592638441858, + "grad_norm": 2.2920400583982046, + "learning_rate": 2.666093136003931e-06, + "loss": 0.5566, + 
"step": 5818 + }, + { + "epoch": 0.24011719072377652, + "grad_norm": 2.8296418393824307, + "learning_rate": 2.6659670237353867e-06, + "loss": 0.5606, + "step": 5819 + }, + { + "epoch": 0.24015845506313444, + "grad_norm": 3.0145483203587857, + "learning_rate": 2.6658408906397537e-06, + "loss": 0.4676, + "step": 5820 + }, + { + "epoch": 0.24019971940249238, + "grad_norm": 2.8216680972693253, + "learning_rate": 2.6657147367192867e-06, + "loss": 0.5928, + "step": 5821 + }, + { + "epoch": 0.2402409837418503, + "grad_norm": 23.83039840826142, + "learning_rate": 2.665588561976238e-06, + "loss": 0.5352, + "step": 5822 + }, + { + "epoch": 0.2402822480812082, + "grad_norm": 3.61098007238417, + "learning_rate": 2.665462366412862e-06, + "loss": 0.5671, + "step": 5823 + }, + { + "epoch": 0.24032351242056615, + "grad_norm": 38.098672666945404, + "learning_rate": 2.665336150031413e-06, + "loss": 0.5782, + "step": 5824 + }, + { + "epoch": 0.24036477675992407, + "grad_norm": 2.7343121021001133, + "learning_rate": 2.665209912834144e-06, + "loss": 0.5356, + "step": 5825 + }, + { + "epoch": 0.24040604109928201, + "grad_norm": 2.251495117212428, + "learning_rate": 2.6650836548233117e-06, + "loss": 0.5837, + "step": 5826 + }, + { + "epoch": 0.24044730543863993, + "grad_norm": 4.5931167083462165, + "learning_rate": 2.6649573760011705e-06, + "loss": 0.5553, + "step": 5827 + }, + { + "epoch": 0.24048856977799785, + "grad_norm": 2.860284885457152, + "learning_rate": 2.6648310763699763e-06, + "loss": 0.5326, + "step": 5828 + }, + { + "epoch": 0.2405298341173558, + "grad_norm": 4.827442318847925, + "learning_rate": 2.6647047559319853e-06, + "loss": 0.5806, + "step": 5829 + }, + { + "epoch": 0.2405710984567137, + "grad_norm": 3.1309573670031976, + "learning_rate": 2.6645784146894536e-06, + "loss": 0.5157, + "step": 5830 + }, + { + "epoch": 0.24061236279607162, + "grad_norm": 4.692763535928423, + "learning_rate": 2.6644520526446384e-06, + "loss": 0.5797, + "step": 5831 + }, + { + "epoch": 0.24065362713542957, + "grad_norm": 5.12948156761253, + "learning_rate": 2.6643256697997963e-06, + "loss": 0.5449, + "step": 5832 + }, + { + "epoch": 0.24069489147478748, + "grad_norm": 5.230113185461175, + "learning_rate": 2.6641992661571847e-06, + "loss": 0.5714, + "step": 5833 + }, + { + "epoch": 0.24073615581414542, + "grad_norm": 2.3473227280217674, + "learning_rate": 2.6640728417190625e-06, + "loss": 0.4998, + "step": 5834 + }, + { + "epoch": 0.24077742015350334, + "grad_norm": 8.160954941279345, + "learning_rate": 2.6639463964876866e-06, + "loss": 0.5944, + "step": 5835 + }, + { + "epoch": 0.24081868449286126, + "grad_norm": 2.49283671917186, + "learning_rate": 2.6638199304653166e-06, + "loss": 0.5492, + "step": 5836 + }, + { + "epoch": 0.2408599488322192, + "grad_norm": 4.374679392185042, + "learning_rate": 2.663693443654211e-06, + "loss": 0.5227, + "step": 5837 + }, + { + "epoch": 0.24090121317157712, + "grad_norm": 3.628428604942947, + "learning_rate": 2.6635669360566298e-06, + "loss": 0.5793, + "step": 5838 + }, + { + "epoch": 0.24094247751093506, + "grad_norm": 4.060587366603556, + "learning_rate": 2.663440407674832e-06, + "loss": 0.5365, + "step": 5839 + }, + { + "epoch": 0.24098374185029298, + "grad_norm": 2.6658679359323787, + "learning_rate": 2.6633138585110786e-06, + "loss": 0.5386, + "step": 5840 + }, + { + "epoch": 0.2410250061896509, + "grad_norm": 16.96845160611494, + "learning_rate": 2.6631872885676288e-06, + "loss": 0.5241, + "step": 5841 + }, + { + "epoch": 0.24106627052900884, + "grad_norm": 3.937827406944194, 
+ "learning_rate": 2.6630606978467445e-06, + "loss": 0.571, + "step": 5842 + }, + { + "epoch": 0.24110753486836675, + "grad_norm": 7.212832232495875, + "learning_rate": 2.6629340863506866e-06, + "loss": 0.559, + "step": 5843 + }, + { + "epoch": 0.2411487992077247, + "grad_norm": 2.920513249597583, + "learning_rate": 2.6628074540817165e-06, + "loss": 0.4931, + "step": 5844 + }, + { + "epoch": 0.2411900635470826, + "grad_norm": 10.791102469628672, + "learning_rate": 2.6626808010420963e-06, + "loss": 0.6464, + "step": 5845 + }, + { + "epoch": 0.24123132788644053, + "grad_norm": 7.727326143633872, + "learning_rate": 2.662554127234089e-06, + "loss": 0.5874, + "step": 5846 + }, + { + "epoch": 0.24127259222579847, + "grad_norm": 3.880639846346912, + "learning_rate": 2.662427432659956e-06, + "loss": 0.5445, + "step": 5847 + }, + { + "epoch": 0.2413138565651564, + "grad_norm": 2.7837171839821653, + "learning_rate": 2.6623007173219617e-06, + "loss": 0.5397, + "step": 5848 + }, + { + "epoch": 0.24135512090451433, + "grad_norm": 3.1226724399938615, + "learning_rate": 2.662173981222369e-06, + "loss": 0.4569, + "step": 5849 + }, + { + "epoch": 0.24139638524387225, + "grad_norm": 4.90768721990518, + "learning_rate": 2.662047224363441e-06, + "loss": 0.5952, + "step": 5850 + }, + { + "epoch": 0.24143764958323016, + "grad_norm": 3.577035417011018, + "learning_rate": 2.661920446747443e-06, + "loss": 0.4985, + "step": 5851 + }, + { + "epoch": 0.2414789139225881, + "grad_norm": 3.772500877099922, + "learning_rate": 2.6617936483766395e-06, + "loss": 0.5503, + "step": 5852 + }, + { + "epoch": 0.24152017826194602, + "grad_norm": 4.239381894860853, + "learning_rate": 2.6616668292532947e-06, + "loss": 0.5667, + "step": 5853 + }, + { + "epoch": 0.24156144260130397, + "grad_norm": 2.5970642351102007, + "learning_rate": 2.6615399893796744e-06, + "loss": 0.5998, + "step": 5854 + }, + { + "epoch": 0.24160270694066188, + "grad_norm": 3.1461801678144448, + "learning_rate": 2.661413128758044e-06, + "loss": 0.5137, + "step": 5855 + }, + { + "epoch": 0.2416439712800198, + "grad_norm": 2.5464519937691774, + "learning_rate": 2.66128624739067e-06, + "loss": 0.5565, + "step": 5856 + }, + { + "epoch": 0.24168523561937774, + "grad_norm": 2.957442831403805, + "learning_rate": 2.661159345279818e-06, + "loss": 0.5842, + "step": 5857 + }, + { + "epoch": 0.24172649995873566, + "grad_norm": 5.788721279495027, + "learning_rate": 2.6610324224277556e-06, + "loss": 0.5413, + "step": 5858 + }, + { + "epoch": 0.24176776429809357, + "grad_norm": 20.199069311416444, + "learning_rate": 2.6609054788367495e-06, + "loss": 0.549, + "step": 5859 + }, + { + "epoch": 0.24180902863745152, + "grad_norm": 5.655579907809777, + "learning_rate": 2.660778514509068e-06, + "loss": 0.5289, + "step": 5860 + }, + { + "epoch": 0.24185029297680943, + "grad_norm": 4.25171817832081, + "learning_rate": 2.660651529446978e-06, + "loss": 0.5515, + "step": 5861 + }, + { + "epoch": 0.24189155731616738, + "grad_norm": 4.4884107370718604, + "learning_rate": 2.660524523652748e-06, + "loss": 0.5552, + "step": 5862 + }, + { + "epoch": 0.2419328216555253, + "grad_norm": 7.397053927368603, + "learning_rate": 2.660397497128647e-06, + "loss": 0.471, + "step": 5863 + }, + { + "epoch": 0.2419740859948832, + "grad_norm": 4.925403990591784, + "learning_rate": 2.660270449876944e-06, + "loss": 0.6081, + "step": 5864 + }, + { + "epoch": 0.24201535033424115, + "grad_norm": 6.012990842367945, + "learning_rate": 2.660143381899908e-06, + "loss": 0.5325, + "step": 5865 + }, + { + "epoch": 
0.24205661467359907, + "grad_norm": 4.001494734638523, + "learning_rate": 2.660016293199809e-06, + "loss": 0.5956, + "step": 5866 + }, + { + "epoch": 0.242097879012957, + "grad_norm": 3.5348722419658842, + "learning_rate": 2.659889183778917e-06, + "loss": 0.5632, + "step": 5867 + }, + { + "epoch": 0.24213914335231493, + "grad_norm": 2.430496690554853, + "learning_rate": 2.659762053639503e-06, + "loss": 0.5598, + "step": 5868 + }, + { + "epoch": 0.24218040769167284, + "grad_norm": 5.121270801683922, + "learning_rate": 2.6596349027838368e-06, + "loss": 0.5277, + "step": 5869 + }, + { + "epoch": 0.2422216720310308, + "grad_norm": 3.88429507255277, + "learning_rate": 2.659507731214191e-06, + "loss": 0.5622, + "step": 5870 + }, + { + "epoch": 0.2422629363703887, + "grad_norm": 15.868714974688393, + "learning_rate": 2.659380538932836e-06, + "loss": 0.6123, + "step": 5871 + }, + { + "epoch": 0.24230420070974665, + "grad_norm": 2.633617707804052, + "learning_rate": 2.6592533259420443e-06, + "loss": 0.537, + "step": 5872 + }, + { + "epoch": 0.24234546504910456, + "grad_norm": 16.760812815287572, + "learning_rate": 2.659126092244088e-06, + "loss": 0.5534, + "step": 5873 + }, + { + "epoch": 0.24238672938846248, + "grad_norm": 34.42132255413082, + "learning_rate": 2.6589988378412403e-06, + "loss": 0.5815, + "step": 5874 + }, + { + "epoch": 0.24242799372782042, + "grad_norm": 8.831302102193744, + "learning_rate": 2.658871562735774e-06, + "loss": 0.5355, + "step": 5875 + }, + { + "epoch": 0.24246925806717834, + "grad_norm": 4.521002514456667, + "learning_rate": 2.6587442669299625e-06, + "loss": 0.5996, + "step": 5876 + }, + { + "epoch": 0.24251052240653628, + "grad_norm": 6.724137901015465, + "learning_rate": 2.6586169504260793e-06, + "loss": 0.6035, + "step": 5877 + }, + { + "epoch": 0.2425517867458942, + "grad_norm": 3.9224529939318824, + "learning_rate": 2.6584896132263996e-06, + "loss": 0.5819, + "step": 5878 + }, + { + "epoch": 0.24259305108525211, + "grad_norm": 6.742839437602644, + "learning_rate": 2.658362255333197e-06, + "loss": 0.5513, + "step": 5879 + }, + { + "epoch": 0.24263431542461006, + "grad_norm": 3.7979119449809478, + "learning_rate": 2.6582348767487463e-06, + "loss": 0.5522, + "step": 5880 + }, + { + "epoch": 0.24267557976396797, + "grad_norm": 4.673746442447521, + "learning_rate": 2.658107477475324e-06, + "loss": 0.594, + "step": 5881 + }, + { + "epoch": 0.24271684410332592, + "grad_norm": 6.532897964888608, + "learning_rate": 2.6579800575152046e-06, + "loss": 0.607, + "step": 5882 + }, + { + "epoch": 0.24275810844268383, + "grad_norm": 4.24392198136554, + "learning_rate": 2.6578526168706644e-06, + "loss": 0.524, + "step": 5883 + }, + { + "epoch": 0.24279937278204175, + "grad_norm": 3.7428793847853665, + "learning_rate": 2.6577251555439805e-06, + "loss": 0.5194, + "step": 5884 + }, + { + "epoch": 0.2428406371213997, + "grad_norm": 1.94165312250491, + "learning_rate": 2.6575976735374285e-06, + "loss": 0.5453, + "step": 5885 + }, + { + "epoch": 0.2428819014607576, + "grad_norm": 4.3385957496212475, + "learning_rate": 2.6574701708532866e-06, + "loss": 0.5273, + "step": 5886 + }, + { + "epoch": 0.24292316580011555, + "grad_norm": 13.245063420121738, + "learning_rate": 2.6573426474938314e-06, + "loss": 0.5555, + "step": 5887 + }, + { + "epoch": 0.24296443013947347, + "grad_norm": 3.8400542247744514, + "learning_rate": 2.657215103461342e-06, + "loss": 0.5485, + "step": 5888 + }, + { + "epoch": 0.24300569447883139, + "grad_norm": 6.505092533062324, + "learning_rate": 
2.6570875387580956e-06, + "loss": 0.5702, + "step": 5889 + }, + { + "epoch": 0.24304695881818933, + "grad_norm": 36.17320497383095, + "learning_rate": 2.656959953386371e-06, + "loss": 0.5811, + "step": 5890 + }, + { + "epoch": 0.24308822315754725, + "grad_norm": 2.436625915028202, + "learning_rate": 2.6568323473484476e-06, + "loss": 0.5241, + "step": 5891 + }, + { + "epoch": 0.24312948749690516, + "grad_norm": 3.2500004841875736, + "learning_rate": 2.6567047206466047e-06, + "loss": 0.5204, + "step": 5892 + }, + { + "epoch": 0.2431707518362631, + "grad_norm": 4.044966382138557, + "learning_rate": 2.6565770732831215e-06, + "loss": 0.5701, + "step": 5893 + }, + { + "epoch": 0.24321201617562102, + "grad_norm": 3.761705594908179, + "learning_rate": 2.656449405260279e-06, + "loss": 0.614, + "step": 5894 + }, + { + "epoch": 0.24325328051497896, + "grad_norm": 2.8301711130611396, + "learning_rate": 2.6563217165803568e-06, + "loss": 0.5159, + "step": 5895 + }, + { + "epoch": 0.24329454485433688, + "grad_norm": 11.26402375142107, + "learning_rate": 2.6561940072456354e-06, + "loss": 0.5462, + "step": 5896 + }, + { + "epoch": 0.2433358091936948, + "grad_norm": 6.975947794092705, + "learning_rate": 2.6560662772583974e-06, + "loss": 0.5242, + "step": 5897 + }, + { + "epoch": 0.24337707353305274, + "grad_norm": 5.035790523311619, + "learning_rate": 2.6559385266209235e-06, + "loss": 0.5329, + "step": 5898 + }, + { + "epoch": 0.24341833787241066, + "grad_norm": 2.523084497304878, + "learning_rate": 2.655810755335496e-06, + "loss": 0.6018, + "step": 5899 + }, + { + "epoch": 0.2434596022117686, + "grad_norm": 2.4313483058004834, + "learning_rate": 2.655682963404397e-06, + "loss": 0.5523, + "step": 5900 + }, + { + "epoch": 0.24350086655112652, + "grad_norm": 3.073397302034637, + "learning_rate": 2.655555150829909e-06, + "loss": 0.56, + "step": 5901 + }, + { + "epoch": 0.24354213089048443, + "grad_norm": 4.0433852337325185, + "learning_rate": 2.655427317614315e-06, + "loss": 0.5329, + "step": 5902 + }, + { + "epoch": 0.24358339522984238, + "grad_norm": 3.4366647345694785, + "learning_rate": 2.655299463759899e-06, + "loss": 0.5359, + "step": 5903 + }, + { + "epoch": 0.2436246595692003, + "grad_norm": 2.929966842661493, + "learning_rate": 2.655171589268944e-06, + "loss": 0.5582, + "step": 5904 + }, + { + "epoch": 0.24366592390855824, + "grad_norm": 5.802483292071831, + "learning_rate": 2.6550436941437354e-06, + "loss": 0.5325, + "step": 5905 + }, + { + "epoch": 0.24370718824791615, + "grad_norm": 4.049891418004216, + "learning_rate": 2.6549157783865567e-06, + "loss": 0.5348, + "step": 5906 + }, + { + "epoch": 0.24374845258727407, + "grad_norm": 2.8742489392055885, + "learning_rate": 2.654787841999693e-06, + "loss": 0.5632, + "step": 5907 + }, + { + "epoch": 0.243789716926632, + "grad_norm": 2.656465354230316, + "learning_rate": 2.654659884985429e-06, + "loss": 0.5451, + "step": 5908 + }, + { + "epoch": 0.24383098126598993, + "grad_norm": 3.3372625054732707, + "learning_rate": 2.654531907346052e-06, + "loss": 0.5619, + "step": 5909 + }, + { + "epoch": 0.24387224560534787, + "grad_norm": 4.668888847732188, + "learning_rate": 2.654403909083846e-06, + "loss": 0.5731, + "step": 5910 + }, + { + "epoch": 0.2439135099447058, + "grad_norm": 5.815541133084574, + "learning_rate": 2.654275890201099e-06, + "loss": 0.4874, + "step": 5911 + }, + { + "epoch": 0.2439547742840637, + "grad_norm": 2.585882710839653, + "learning_rate": 2.6541478507000963e-06, + "loss": 0.4894, + "step": 5912 + }, + { + "epoch": 0.24399603862342165, 
+ "grad_norm": 3.7992760681607702, + "learning_rate": 2.6540197905831266e-06, + "loss": 0.5871, + "step": 5913 + }, + { + "epoch": 0.24403730296277956, + "grad_norm": 3.602749204492559, + "learning_rate": 2.653891709852476e-06, + "loss": 0.5518, + "step": 5914 + }, + { + "epoch": 0.2440785673021375, + "grad_norm": 2.970029262056744, + "learning_rate": 2.653763608510433e-06, + "loss": 0.5951, + "step": 5915 + }, + { + "epoch": 0.24411983164149542, + "grad_norm": 2.213483225517333, + "learning_rate": 2.653635486559286e-06, + "loss": 0.5376, + "step": 5916 + }, + { + "epoch": 0.24416109598085334, + "grad_norm": 2.635469551394524, + "learning_rate": 2.6535073440013227e-06, + "loss": 0.5539, + "step": 5917 + }, + { + "epoch": 0.24420236032021128, + "grad_norm": 3.329153683806965, + "learning_rate": 2.653379180838833e-06, + "loss": 0.5991, + "step": 5918 + }, + { + "epoch": 0.2442436246595692, + "grad_norm": 70.53007380652586, + "learning_rate": 2.6532509970741053e-06, + "loss": 0.5326, + "step": 5919 + }, + { + "epoch": 0.24428488899892714, + "grad_norm": 3.095447153815282, + "learning_rate": 2.65312279270943e-06, + "loss": 0.5262, + "step": 5920 + }, + { + "epoch": 0.24432615333828506, + "grad_norm": 3.889019854390617, + "learning_rate": 2.6529945677470976e-06, + "loss": 0.5342, + "step": 5921 + }, + { + "epoch": 0.24436741767764297, + "grad_norm": 2.4618117992808144, + "learning_rate": 2.6528663221893975e-06, + "loss": 0.5978, + "step": 5922 + }, + { + "epoch": 0.24440868201700092, + "grad_norm": 4.235226646075335, + "learning_rate": 2.6527380560386205e-06, + "loss": 0.5144, + "step": 5923 + }, + { + "epoch": 0.24444994635635883, + "grad_norm": 2.500179603543645, + "learning_rate": 2.6526097692970583e-06, + "loss": 0.5411, + "step": 5924 + }, + { + "epoch": 0.24449121069571675, + "grad_norm": 17.430471269851818, + "learning_rate": 2.6524814619670025e-06, + "loss": 0.5322, + "step": 5925 + }, + { + "epoch": 0.2445324750350747, + "grad_norm": 2.9252860367369884, + "learning_rate": 2.6523531340507443e-06, + "loss": 0.5266, + "step": 5926 + }, + { + "epoch": 0.2445737393744326, + "grad_norm": 3.9359737055675654, + "learning_rate": 2.6522247855505763e-06, + "loss": 0.5785, + "step": 5927 + }, + { + "epoch": 0.24461500371379055, + "grad_norm": 4.469086086668981, + "learning_rate": 2.6520964164687918e-06, + "loss": 0.5823, + "step": 5928 + }, + { + "epoch": 0.24465626805314847, + "grad_norm": 2.227183724661378, + "learning_rate": 2.651968026807683e-06, + "loss": 0.5619, + "step": 5929 + }, + { + "epoch": 0.24469753239250638, + "grad_norm": 4.259094200035233, + "learning_rate": 2.651839616569543e-06, + "loss": 0.5675, + "step": 5930 + }, + { + "epoch": 0.24473879673186433, + "grad_norm": 8.653821598504482, + "learning_rate": 2.6517111857566666e-06, + "loss": 0.558, + "step": 5931 + }, + { + "epoch": 0.24478006107122224, + "grad_norm": 8.337425404991201, + "learning_rate": 2.651582734371347e-06, + "loss": 0.5279, + "step": 5932 + }, + { + "epoch": 0.2448213254105802, + "grad_norm": 3.504356282380527, + "learning_rate": 2.6514542624158786e-06, + "loss": 0.5431, + "step": 5933 + }, + { + "epoch": 0.2448625897499381, + "grad_norm": 2.168397186201425, + "learning_rate": 2.651325769892557e-06, + "loss": 0.5458, + "step": 5934 + }, + { + "epoch": 0.24490385408929602, + "grad_norm": 4.017019102957556, + "learning_rate": 2.651197256803677e-06, + "loss": 0.5758, + "step": 5935 + }, + { + "epoch": 0.24494511842865396, + "grad_norm": 5.381384333765569, + "learning_rate": 2.651068723151534e-06, + "loss": 0.5884, 
+ "step": 5936 + }, + { + "epoch": 0.24498638276801188, + "grad_norm": 5.454261370027486, + "learning_rate": 2.650940168938424e-06, + "loss": 0.5605, + "step": 5937 + }, + { + "epoch": 0.24502764710736982, + "grad_norm": 2.3341606423412014, + "learning_rate": 2.650811594166643e-06, + "loss": 0.576, + "step": 5938 + }, + { + "epoch": 0.24506891144672774, + "grad_norm": 2.8776320749509923, + "learning_rate": 2.650682998838489e-06, + "loss": 0.5381, + "step": 5939 + }, + { + "epoch": 0.24511017578608565, + "grad_norm": 4.020992898004166, + "learning_rate": 2.6505543829562574e-06, + "loss": 0.5377, + "step": 5940 + }, + { + "epoch": 0.2451514401254436, + "grad_norm": 3.2531812749711584, + "learning_rate": 2.6504257465222467e-06, + "loss": 0.5562, + "step": 5941 + }, + { + "epoch": 0.24519270446480151, + "grad_norm": 3.5601680181304105, + "learning_rate": 2.650297089538754e-06, + "loss": 0.5409, + "step": 5942 + }, + { + "epoch": 0.24523396880415946, + "grad_norm": 4.887351499190979, + "learning_rate": 2.650168412008078e-06, + "loss": 0.5752, + "step": 5943 + }, + { + "epoch": 0.24527523314351737, + "grad_norm": 3.3633078359393758, + "learning_rate": 2.6500397139325164e-06, + "loss": 0.5421, + "step": 5944 + }, + { + "epoch": 0.2453164974828753, + "grad_norm": 2.870526091631835, + "learning_rate": 2.6499109953143682e-06, + "loss": 0.5446, + "step": 5945 + }, + { + "epoch": 0.24535776182223323, + "grad_norm": 6.9447976093597035, + "learning_rate": 2.649782256155934e-06, + "loss": 0.4863, + "step": 5946 + }, + { + "epoch": 0.24539902616159115, + "grad_norm": 6.227409354042247, + "learning_rate": 2.6496534964595117e-06, + "loss": 0.5114, + "step": 5947 + }, + { + "epoch": 0.2454402905009491, + "grad_norm": 3.8032687063201407, + "learning_rate": 2.649524716227402e-06, + "loss": 0.5582, + "step": 5948 + }, + { + "epoch": 0.245481554840307, + "grad_norm": 4.007562122193779, + "learning_rate": 2.649395915461906e-06, + "loss": 0.5122, + "step": 5949 + }, + { + "epoch": 0.24552281917966493, + "grad_norm": 3.070261401720644, + "learning_rate": 2.6492670941653227e-06, + "loss": 0.5206, + "step": 5950 + }, + { + "epoch": 0.24556408351902287, + "grad_norm": 5.522543020584145, + "learning_rate": 2.649138252339954e-06, + "loss": 0.55, + "step": 5951 + }, + { + "epoch": 0.24560534785838078, + "grad_norm": 7.2268250541582315, + "learning_rate": 2.6490093899881017e-06, + "loss": 0.5587, + "step": 5952 + }, + { + "epoch": 0.2456466121977387, + "grad_norm": 6.196232513738779, + "learning_rate": 2.648880507112068e-06, + "loss": 0.5292, + "step": 5953 + }, + { + "epoch": 0.24568787653709664, + "grad_norm": 5.02188466056114, + "learning_rate": 2.6487516037141532e-06, + "loss": 0.5233, + "step": 5954 + }, + { + "epoch": 0.24572914087645456, + "grad_norm": 3.027226296288428, + "learning_rate": 2.6486226797966613e-06, + "loss": 0.505, + "step": 5955 + }, + { + "epoch": 0.2457704052158125, + "grad_norm": 5.835759701385168, + "learning_rate": 2.6484937353618956e-06, + "loss": 0.4896, + "step": 5956 + }, + { + "epoch": 0.24581166955517042, + "grad_norm": 4.088731148025572, + "learning_rate": 2.648364770412158e-06, + "loss": 0.5661, + "step": 5957 + }, + { + "epoch": 0.24585293389452834, + "grad_norm": 8.18635585096858, + "learning_rate": 2.648235784949753e-06, + "loss": 0.5211, + "step": 5958 + }, + { + "epoch": 0.24589419823388628, + "grad_norm": 4.022696106733155, + "learning_rate": 2.648106778976985e-06, + "loss": 0.5697, + "step": 5959 + }, + { + "epoch": 0.2459354625732442, + "grad_norm": 3.714129277696372, + 
"learning_rate": 2.647977752496157e-06, + "loss": 0.5369, + "step": 5960 + }, + { + "epoch": 0.24597672691260214, + "grad_norm": 3.1827323640434866, + "learning_rate": 2.647848705509575e-06, + "loss": 0.5576, + "step": 5961 + }, + { + "epoch": 0.24601799125196006, + "grad_norm": 3.810752520611448, + "learning_rate": 2.6477196380195435e-06, + "loss": 0.6194, + "step": 5962 + }, + { + "epoch": 0.24605925559131797, + "grad_norm": 12.22624810368164, + "learning_rate": 2.6475905500283685e-06, + "loss": 0.5742, + "step": 5963 + }, + { + "epoch": 0.24610051993067592, + "grad_norm": 4.823244327582313, + "learning_rate": 2.6474614415383545e-06, + "loss": 0.5636, + "step": 5964 + }, + { + "epoch": 0.24614178427003383, + "grad_norm": 5.008323933233951, + "learning_rate": 2.6473323125518097e-06, + "loss": 0.5299, + "step": 5965 + }, + { + "epoch": 0.24618304860939177, + "grad_norm": 11.525894459143311, + "learning_rate": 2.6472031630710385e-06, + "loss": 0.5617, + "step": 5966 + }, + { + "epoch": 0.2462243129487497, + "grad_norm": 3.14851099859571, + "learning_rate": 2.64707399309835e-06, + "loss": 0.5483, + "step": 5967 + }, + { + "epoch": 0.2462655772881076, + "grad_norm": 4.574297600822141, + "learning_rate": 2.6469448026360503e-06, + "loss": 0.5499, + "step": 5968 + }, + { + "epoch": 0.24630684162746555, + "grad_norm": 3.458811374412756, + "learning_rate": 2.646815591686447e-06, + "loss": 0.544, + "step": 5969 + }, + { + "epoch": 0.24634810596682347, + "grad_norm": 4.410748418512664, + "learning_rate": 2.646686360251848e-06, + "loss": 0.5312, + "step": 5970 + }, + { + "epoch": 0.2463893703061814, + "grad_norm": 3.612307320496355, + "learning_rate": 2.6465571083345624e-06, + "loss": 0.5821, + "step": 5971 + }, + { + "epoch": 0.24643063464553933, + "grad_norm": 2.8301012050496226, + "learning_rate": 2.6464278359368985e-06, + "loss": 0.5689, + "step": 5972 + }, + { + "epoch": 0.24647189898489724, + "grad_norm": 11.95385285302927, + "learning_rate": 2.6462985430611654e-06, + "loss": 0.5366, + "step": 5973 + }, + { + "epoch": 0.24651316332425519, + "grad_norm": 3.4345826025946993, + "learning_rate": 2.646169229709673e-06, + "loss": 0.5527, + "step": 5974 + }, + { + "epoch": 0.2465544276636131, + "grad_norm": 10.487058050042119, + "learning_rate": 2.6460398958847307e-06, + "loss": 0.6358, + "step": 5975 + }, + { + "epoch": 0.24659569200297105, + "grad_norm": 4.110877195078657, + "learning_rate": 2.645910541588649e-06, + "loss": 0.5926, + "step": 5976 + }, + { + "epoch": 0.24663695634232896, + "grad_norm": 3.542444955007398, + "learning_rate": 2.645781166823738e-06, + "loss": 0.5902, + "step": 5977 + }, + { + "epoch": 0.24667822068168688, + "grad_norm": 2.0621010424486976, + "learning_rate": 2.64565177159231e-06, + "loss": 0.49, + "step": 5978 + }, + { + "epoch": 0.24671948502104482, + "grad_norm": 2.4155927185063955, + "learning_rate": 2.6455223558966744e-06, + "loss": 0.5277, + "step": 5979 + }, + { + "epoch": 0.24676074936040274, + "grad_norm": 3.172163588993262, + "learning_rate": 2.645392919739144e-06, + "loss": 0.4852, + "step": 5980 + }, + { + "epoch": 0.24680201369976068, + "grad_norm": 2.741604779369226, + "learning_rate": 2.6452634631220312e-06, + "loss": 0.505, + "step": 5981 + }, + { + "epoch": 0.2468432780391186, + "grad_norm": 3.676825564464248, + "learning_rate": 2.645133986047648e-06, + "loss": 0.6096, + "step": 5982 + }, + { + "epoch": 0.2468845423784765, + "grad_norm": 3.16617291147686, + "learning_rate": 2.6450044885183065e-06, + "loss": 0.5094, + "step": 5983 + }, + { + "epoch": 
0.24692580671783446, + "grad_norm": 2.841987402547611, + "learning_rate": 2.644874970536321e-06, + "loss": 0.576, + "step": 5984 + }, + { + "epoch": 0.24696707105719237, + "grad_norm": 3.3551541356697596, + "learning_rate": 2.644745432104004e-06, + "loss": 0.5378, + "step": 5985 + }, + { + "epoch": 0.2470083353965503, + "grad_norm": 3.978974481064547, + "learning_rate": 2.6446158732236704e-06, + "loss": 0.5295, + "step": 5986 + }, + { + "epoch": 0.24704959973590823, + "grad_norm": 39.22070148937691, + "learning_rate": 2.6444862938976334e-06, + "loss": 0.5295, + "step": 5987 + }, + { + "epoch": 0.24709086407526615, + "grad_norm": 2.6831205759080876, + "learning_rate": 2.644356694128209e-06, + "loss": 0.5423, + "step": 5988 + }, + { + "epoch": 0.2471321284146241, + "grad_norm": 7.469111165334446, + "learning_rate": 2.6442270739177105e-06, + "loss": 0.5525, + "step": 5989 + }, + { + "epoch": 0.247173392753982, + "grad_norm": 2.7020310903461695, + "learning_rate": 2.644097433268454e-06, + "loss": 0.5222, + "step": 5990 + }, + { + "epoch": 0.24721465709333992, + "grad_norm": 3.514166131706017, + "learning_rate": 2.6439677721827554e-06, + "loss": 0.5803, + "step": 5991 + }, + { + "epoch": 0.24725592143269787, + "grad_norm": 4.804372830573381, + "learning_rate": 2.6438380906629306e-06, + "loss": 0.5334, + "step": 5992 + }, + { + "epoch": 0.24729718577205578, + "grad_norm": 7.8030059703357075, + "learning_rate": 2.6437083887112965e-06, + "loss": 0.5683, + "step": 5993 + }, + { + "epoch": 0.24733845011141373, + "grad_norm": 3.506788123766532, + "learning_rate": 2.643578666330169e-06, + "loss": 0.6518, + "step": 5994 + }, + { + "epoch": 0.24737971445077164, + "grad_norm": 4.506851926574622, + "learning_rate": 2.6434489235218656e-06, + "loss": 0.5629, + "step": 5995 + }, + { + "epoch": 0.24742097879012956, + "grad_norm": 5.021011796648596, + "learning_rate": 2.643319160288704e-06, + "loss": 0.5594, + "step": 5996 + }, + { + "epoch": 0.2474622431294875, + "grad_norm": 3.3177451960292985, + "learning_rate": 2.6431893766330025e-06, + "loss": 0.5189, + "step": 5997 + }, + { + "epoch": 0.24750350746884542, + "grad_norm": 3.234195873072929, + "learning_rate": 2.6430595725570788e-06, + "loss": 0.5774, + "step": 5998 + }, + { + "epoch": 0.24754477180820336, + "grad_norm": 5.085663068511008, + "learning_rate": 2.6429297480632514e-06, + "loss": 0.5375, + "step": 5999 + }, + { + "epoch": 0.24758603614756128, + "grad_norm": 3.508238243847468, + "learning_rate": 2.6427999031538395e-06, + "loss": 0.5562, + "step": 6000 + }, + { + "epoch": 0.2476273004869192, + "grad_norm": 2.167869886058602, + "learning_rate": 2.6426700378311627e-06, + "loss": 0.5444, + "step": 6001 + }, + { + "epoch": 0.24766856482627714, + "grad_norm": 5.3390915073912595, + "learning_rate": 2.64254015209754e-06, + "loss": 0.5383, + "step": 6002 + }, + { + "epoch": 0.24770982916563505, + "grad_norm": 9.5998919285055, + "learning_rate": 2.642410245955293e-06, + "loss": 0.5792, + "step": 6003 + }, + { + "epoch": 0.247751093504993, + "grad_norm": 3.1599115800029964, + "learning_rate": 2.6422803194067405e-06, + "loss": 0.5833, + "step": 6004 + }, + { + "epoch": 0.2477923578443509, + "grad_norm": 3.716270280721189, + "learning_rate": 2.642150372454204e-06, + "loss": 0.6179, + "step": 6005 + }, + { + "epoch": 0.24783362218370883, + "grad_norm": 2.3257971505677975, + "learning_rate": 2.6420204051000046e-06, + "loss": 0.597, + "step": 6006 + }, + { + "epoch": 0.24787488652306677, + "grad_norm": 3.144688637111845, + "learning_rate": 
2.6418904173464635e-06, + "loss": 0.5665, + "step": 6007 + }, + { + "epoch": 0.2479161508624247, + "grad_norm": 3.0682965420496613, + "learning_rate": 2.6417604091959036e-06, + "loss": 0.5998, + "step": 6008 + }, + { + "epoch": 0.24795741520178263, + "grad_norm": 2.535021635493759, + "learning_rate": 2.641630380650646e-06, + "loss": 0.5391, + "step": 6009 + }, + { + "epoch": 0.24799867954114055, + "grad_norm": 24.465640656422682, + "learning_rate": 2.641500331713014e-06, + "loss": 0.574, + "step": 6010 + }, + { + "epoch": 0.24803994388049846, + "grad_norm": 2.8404502014798165, + "learning_rate": 2.6413702623853307e-06, + "loss": 0.4602, + "step": 6011 + }, + { + "epoch": 0.2480812082198564, + "grad_norm": 3.8827941926633653, + "learning_rate": 2.6412401726699195e-06, + "loss": 0.6095, + "step": 6012 + }, + { + "epoch": 0.24812247255921432, + "grad_norm": 2.4430551480123897, + "learning_rate": 2.6411100625691035e-06, + "loss": 0.5279, + "step": 6013 + }, + { + "epoch": 0.24816373689857224, + "grad_norm": 3.567916449737768, + "learning_rate": 2.640979932085207e-06, + "loss": 0.6326, + "step": 6014 + }, + { + "epoch": 0.24820500123793018, + "grad_norm": 14.208621978750637, + "learning_rate": 2.640849781220555e-06, + "loss": 0.5611, + "step": 6015 + }, + { + "epoch": 0.2482462655772881, + "grad_norm": 3.858540896747428, + "learning_rate": 2.640719609977472e-06, + "loss": 0.552, + "step": 6016 + }, + { + "epoch": 0.24828752991664604, + "grad_norm": 7.593671584059038, + "learning_rate": 2.6405894183582828e-06, + "loss": 0.5213, + "step": 6017 + }, + { + "epoch": 0.24832879425600396, + "grad_norm": 2.4030500161485713, + "learning_rate": 2.6404592063653135e-06, + "loss": 0.5191, + "step": 6018 + }, + { + "epoch": 0.24837005859536188, + "grad_norm": 4.054960227823338, + "learning_rate": 2.6403289740008896e-06, + "loss": 0.5367, + "step": 6019 + }, + { + "epoch": 0.24841132293471982, + "grad_norm": 8.097478728072716, + "learning_rate": 2.6401987212673372e-06, + "loss": 0.5228, + "step": 6020 + }, + { + "epoch": 0.24845258727407774, + "grad_norm": 3.1724238732152377, + "learning_rate": 2.6400684481669843e-06, + "loss": 0.5095, + "step": 6021 + }, + { + "epoch": 0.24849385161343568, + "grad_norm": 5.5419605434633326, + "learning_rate": 2.639938154702156e-06, + "loss": 0.6642, + "step": 6022 + }, + { + "epoch": 0.2485351159527936, + "grad_norm": 8.822287622313667, + "learning_rate": 2.639807840875181e-06, + "loss": 0.5836, + "step": 6023 + }, + { + "epoch": 0.2485763802921515, + "grad_norm": 3.2320646087572267, + "learning_rate": 2.6396775066883865e-06, + "loss": 0.5841, + "step": 6024 + }, + { + "epoch": 0.24861764463150945, + "grad_norm": 3.4604522124699013, + "learning_rate": 2.6395471521441005e-06, + "loss": 0.5626, + "step": 6025 + }, + { + "epoch": 0.24865890897086737, + "grad_norm": 2.819004184304796, + "learning_rate": 2.639416777244652e-06, + "loss": 0.5052, + "step": 6026 + }, + { + "epoch": 0.24870017331022531, + "grad_norm": 3.941510490894503, + "learning_rate": 2.639286381992369e-06, + "loss": 0.5436, + "step": 6027 + }, + { + "epoch": 0.24874143764958323, + "grad_norm": 10.037768828835544, + "learning_rate": 2.639155966389582e-06, + "loss": 0.4861, + "step": 6028 + }, + { + "epoch": 0.24878270198894115, + "grad_norm": 9.337109362818873, + "learning_rate": 2.639025530438619e-06, + "loss": 0.603, + "step": 6029 + }, + { + "epoch": 0.2488239663282991, + "grad_norm": 2.394227475710383, + "learning_rate": 2.6388950741418107e-06, + "loss": 0.5279, + "step": 6030 + }, + { + "epoch": 
0.248865230667657, + "grad_norm": 5.652998470552393, + "learning_rate": 2.638764597501487e-06, + "loss": 0.5129, + "step": 6031 + }, + { + "epoch": 0.24890649500701495, + "grad_norm": 5.168377750046371, + "learning_rate": 2.6386341005199796e-06, + "loss": 0.4922, + "step": 6032 + }, + { + "epoch": 0.24894775934637287, + "grad_norm": 5.5329960443309005, + "learning_rate": 2.6385035831996183e-06, + "loss": 0.518, + "step": 6033 + }, + { + "epoch": 0.24898902368573078, + "grad_norm": 3.801775508161004, + "learning_rate": 2.638373045542735e-06, + "loss": 0.542, + "step": 6034 + }, + { + "epoch": 0.24903028802508873, + "grad_norm": 2.056627468608874, + "learning_rate": 2.638242487551661e-06, + "loss": 0.5413, + "step": 6035 + }, + { + "epoch": 0.24907155236444664, + "grad_norm": 37.60608347725797, + "learning_rate": 2.638111909228729e-06, + "loss": 0.5555, + "step": 6036 + }, + { + "epoch": 0.24911281670380458, + "grad_norm": 4.907245924208518, + "learning_rate": 2.6379813105762714e-06, + "loss": 0.5886, + "step": 6037 + }, + { + "epoch": 0.2491540810431625, + "grad_norm": 2.97471257035513, + "learning_rate": 2.637850691596621e-06, + "loss": 0.5534, + "step": 6038 + }, + { + "epoch": 0.24919534538252042, + "grad_norm": 2.4960151341230197, + "learning_rate": 2.63772005229211e-06, + "loss": 0.5557, + "step": 6039 + }, + { + "epoch": 0.24923660972187836, + "grad_norm": 3.3317250692267755, + "learning_rate": 2.637589392665073e-06, + "loss": 0.6123, + "step": 6040 + }, + { + "epoch": 0.24927787406123628, + "grad_norm": 2.6277081465610017, + "learning_rate": 2.637458712717844e-06, + "loss": 0.4867, + "step": 6041 + }, + { + "epoch": 0.24931913840059422, + "grad_norm": 3.26960208794888, + "learning_rate": 2.6373280124527566e-06, + "loss": 0.5866, + "step": 6042 + }, + { + "epoch": 0.24936040273995214, + "grad_norm": 4.183281553507914, + "learning_rate": 2.6371972918721457e-06, + "loss": 0.5564, + "step": 6043 + }, + { + "epoch": 0.24940166707931005, + "grad_norm": 2.7596909184648584, + "learning_rate": 2.637066550978346e-06, + "loss": 0.5303, + "step": 6044 + }, + { + "epoch": 0.249442931418668, + "grad_norm": 9.643921735289096, + "learning_rate": 2.636935789773694e-06, + "loss": 0.5362, + "step": 6045 + }, + { + "epoch": 0.2494841957580259, + "grad_norm": 6.483272835150003, + "learning_rate": 2.6368050082605243e-06, + "loss": 0.6014, + "step": 6046 + }, + { + "epoch": 0.24952546009738383, + "grad_norm": 2.6103071800309943, + "learning_rate": 2.6366742064411732e-06, + "loss": 0.5142, + "step": 6047 + }, + { + "epoch": 0.24956672443674177, + "grad_norm": 3.093201790138999, + "learning_rate": 2.6365433843179773e-06, + "loss": 0.5319, + "step": 6048 + }, + { + "epoch": 0.2496079887760997, + "grad_norm": 14.77712379799845, + "learning_rate": 2.6364125418932733e-06, + "loss": 0.5352, + "step": 6049 + }, + { + "epoch": 0.24964925311545763, + "grad_norm": 7.637820293312648, + "learning_rate": 2.6362816791693983e-06, + "loss": 0.5599, + "step": 6050 + }, + { + "epoch": 0.24969051745481555, + "grad_norm": 2.619994002057742, + "learning_rate": 2.63615079614869e-06, + "loss": 0.6061, + "step": 6051 + }, + { + "epoch": 0.24973178179417346, + "grad_norm": 5.655862836660375, + "learning_rate": 2.6360198928334868e-06, + "loss": 0.4981, + "step": 6052 + }, + { + "epoch": 0.2497730461335314, + "grad_norm": 2.1901700787211853, + "learning_rate": 2.6358889692261258e-06, + "loss": 0.4913, + "step": 6053 + }, + { + "epoch": 0.24981431047288932, + "grad_norm": 4.165772057504565, + "learning_rate": 2.6357580253289464e-06, + 
"loss": 0.6007, + "step": 6054 + }, + { + "epoch": 0.24985557481224727, + "grad_norm": 4.143902587201022, + "learning_rate": 2.6356270611442874e-06, + "loss": 0.4736, + "step": 6055 + }, + { + "epoch": 0.24989683915160518, + "grad_norm": 2.627124655285128, + "learning_rate": 2.6354960766744887e-06, + "loss": 0.4999, + "step": 6056 + }, + { + "epoch": 0.2499381034909631, + "grad_norm": 4.83413648329985, + "learning_rate": 2.6353650719218893e-06, + "loss": 0.5386, + "step": 6057 + }, + { + "epoch": 0.24997936783032104, + "grad_norm": 2.8778268691427855, + "learning_rate": 2.635234046888829e-06, + "loss": 0.5559, + "step": 6058 + }, + { + "epoch": 0.25002063216967896, + "grad_norm": 2.200934534228588, + "learning_rate": 2.635103001577649e-06, + "loss": 0.5525, + "step": 6059 + }, + { + "epoch": 0.2500618965090369, + "grad_norm": 2.98475946300329, + "learning_rate": 2.63497193599069e-06, + "loss": 0.5554, + "step": 6060 + }, + { + "epoch": 0.2501031608483948, + "grad_norm": 4.7027185402215, + "learning_rate": 2.6348408501302926e-06, + "loss": 0.5315, + "step": 6061 + }, + { + "epoch": 0.25014442518775276, + "grad_norm": 2.305563545378544, + "learning_rate": 2.6347097439987986e-06, + "loss": 0.5418, + "step": 6062 + }, + { + "epoch": 0.2501856895271107, + "grad_norm": 4.889466649613055, + "learning_rate": 2.6345786175985503e-06, + "loss": 0.5367, + "step": 6063 + }, + { + "epoch": 0.2502269538664686, + "grad_norm": 4.915176392236957, + "learning_rate": 2.6344474709318897e-06, + "loss": 0.6156, + "step": 6064 + }, + { + "epoch": 0.2502682182058265, + "grad_norm": 4.695428995610029, + "learning_rate": 2.634316304001159e-06, + "loss": 0.5936, + "step": 6065 + }, + { + "epoch": 0.2503094825451844, + "grad_norm": 59.42913216898104, + "learning_rate": 2.6341851168087017e-06, + "loss": 0.5816, + "step": 6066 + }, + { + "epoch": 0.2503507468845424, + "grad_norm": 2.6162267161949857, + "learning_rate": 2.634053909356861e-06, + "loss": 0.5508, + "step": 6067 + }, + { + "epoch": 0.2503920112239003, + "grad_norm": 2.414806530909033, + "learning_rate": 2.63392268164798e-06, + "loss": 0.5538, + "step": 6068 + }, + { + "epoch": 0.25043327556325823, + "grad_norm": 4.999730566108278, + "learning_rate": 2.6337914336844037e-06, + "loss": 0.526, + "step": 6069 + }, + { + "epoch": 0.25047453990261614, + "grad_norm": 3.362465984618938, + "learning_rate": 2.633660165468476e-06, + "loss": 0.5486, + "step": 6070 + }, + { + "epoch": 0.25051580424197406, + "grad_norm": 3.243290891491386, + "learning_rate": 2.633528877002542e-06, + "loss": 0.567, + "step": 6071 + }, + { + "epoch": 0.25055706858133203, + "grad_norm": 2.421732989203983, + "learning_rate": 2.633397568288946e-06, + "loss": 0.5338, + "step": 6072 + }, + { + "epoch": 0.25059833292068995, + "grad_norm": 2.665710960671018, + "learning_rate": 2.633266239330034e-06, + "loss": 0.532, + "step": 6073 + }, + { + "epoch": 0.25063959726004786, + "grad_norm": 4.69412515678306, + "learning_rate": 2.6331348901281527e-06, + "loss": 0.6039, + "step": 6074 + }, + { + "epoch": 0.2506808615994058, + "grad_norm": 2.0981635533419185, + "learning_rate": 2.6330035206856474e-06, + "loss": 0.5261, + "step": 6075 + }, + { + "epoch": 0.2507221259387637, + "grad_norm": 3.593484521007007, + "learning_rate": 2.632872131004865e-06, + "loss": 0.5801, + "step": 6076 + }, + { + "epoch": 0.25076339027812167, + "grad_norm": 5.6233831150310385, + "learning_rate": 2.632740721088152e-06, + "loss": 0.6135, + "step": 6077 + }, + { + "epoch": 0.2508046546174796, + "grad_norm": 11.34535681154045, + 
"learning_rate": 2.632609290937856e-06, + "loss": 0.5325, + "step": 6078 + }, + { + "epoch": 0.2508459189568375, + "grad_norm": 3.7564240496857852, + "learning_rate": 2.6324778405563248e-06, + "loss": 0.5397, + "step": 6079 + }, + { + "epoch": 0.2508871832961954, + "grad_norm": 13.34576886716933, + "learning_rate": 2.6323463699459064e-06, + "loss": 0.6018, + "step": 6080 + }, + { + "epoch": 0.25092844763555333, + "grad_norm": 4.687849051983757, + "learning_rate": 2.632214879108949e-06, + "loss": 0.5782, + "step": 6081 + }, + { + "epoch": 0.2509697119749113, + "grad_norm": 1.7817546782801967, + "learning_rate": 2.632083368047802e-06, + "loss": 0.5213, + "step": 6082 + }, + { + "epoch": 0.2510109763142692, + "grad_norm": 2.4781994938911036, + "learning_rate": 2.6319518367648133e-06, + "loss": 0.5334, + "step": 6083 + }, + { + "epoch": 0.25105224065362713, + "grad_norm": 2.4648611242945013, + "learning_rate": 2.6318202852623335e-06, + "loss": 0.4905, + "step": 6084 + }, + { + "epoch": 0.25109350499298505, + "grad_norm": 3.2801665757843095, + "learning_rate": 2.631688713542712e-06, + "loss": 0.5374, + "step": 6085 + }, + { + "epoch": 0.25113476933234297, + "grad_norm": 2.1780923427415324, + "learning_rate": 2.631557121608299e-06, + "loss": 0.5236, + "step": 6086 + }, + { + "epoch": 0.25117603367170094, + "grad_norm": 2.86898676123241, + "learning_rate": 2.631425509461445e-06, + "loss": 0.6221, + "step": 6087 + }, + { + "epoch": 0.25121729801105885, + "grad_norm": 2.291553168445203, + "learning_rate": 2.631293877104501e-06, + "loss": 0.6054, + "step": 6088 + }, + { + "epoch": 0.25125856235041677, + "grad_norm": 3.90389954528526, + "learning_rate": 2.631162224539818e-06, + "loss": 0.5429, + "step": 6089 + }, + { + "epoch": 0.2512998266897747, + "grad_norm": 4.771607275026976, + "learning_rate": 2.6310305517697487e-06, + "loss": 0.4882, + "step": 6090 + }, + { + "epoch": 0.2513410910291326, + "grad_norm": 4.791139152812161, + "learning_rate": 2.6308988587966438e-06, + "loss": 0.5279, + "step": 6091 + }, + { + "epoch": 0.2513823553684906, + "grad_norm": 2.4199843173526068, + "learning_rate": 2.6307671456228564e-06, + "loss": 0.533, + "step": 6092 + }, + { + "epoch": 0.2514236197078485, + "grad_norm": 1.7555920148713462, + "learning_rate": 2.630635412250739e-06, + "loss": 0.5187, + "step": 6093 + }, + { + "epoch": 0.2514648840472064, + "grad_norm": 3.128882470033057, + "learning_rate": 2.6305036586826446e-06, + "loss": 0.5271, + "step": 6094 + }, + { + "epoch": 0.2515061483865643, + "grad_norm": 3.427446123130288, + "learning_rate": 2.6303718849209267e-06, + "loss": 0.5284, + "step": 6095 + }, + { + "epoch": 0.25154741272592224, + "grad_norm": 4.478153468237651, + "learning_rate": 2.6302400909679395e-06, + "loss": 0.5516, + "step": 6096 + }, + { + "epoch": 0.2515886770652802, + "grad_norm": 3.072850395284121, + "learning_rate": 2.6301082768260367e-06, + "loss": 0.5081, + "step": 6097 + }, + { + "epoch": 0.2516299414046381, + "grad_norm": 4.205310061960093, + "learning_rate": 2.6299764424975727e-06, + "loss": 0.4941, + "step": 6098 + }, + { + "epoch": 0.25167120574399604, + "grad_norm": 3.244577054697195, + "learning_rate": 2.629844587984903e-06, + "loss": 0.5172, + "step": 6099 + }, + { + "epoch": 0.25171247008335396, + "grad_norm": 2.446202682763938, + "learning_rate": 2.6297127132903826e-06, + "loss": 0.551, + "step": 6100 + }, + { + "epoch": 0.2517537344227119, + "grad_norm": 3.4011032281376776, + "learning_rate": 2.629580818416367e-06, + "loss": 0.542, + "step": 6101 + }, + { + "epoch": 
0.25179499876206984, + "grad_norm": 3.838008771276267, + "learning_rate": 2.629448903365212e-06, + "loss": 0.5439, + "step": 6102 + }, + { + "epoch": 0.25183626310142776, + "grad_norm": 3.7101084975860883, + "learning_rate": 2.629316968139274e-06, + "loss": 0.5558, + "step": 6103 + }, + { + "epoch": 0.2518775274407857, + "grad_norm": 4.871444357515427, + "learning_rate": 2.6291850127409103e-06, + "loss": 0.4497, + "step": 6104 + }, + { + "epoch": 0.2519187917801436, + "grad_norm": 6.096254843567659, + "learning_rate": 2.6290530371724772e-06, + "loss": 0.5661, + "step": 6105 + }, + { + "epoch": 0.2519600561195015, + "grad_norm": 3.90594863654739, + "learning_rate": 2.6289210414363324e-06, + "loss": 0.5409, + "step": 6106 + }, + { + "epoch": 0.2520013204588595, + "grad_norm": 2.022758601325279, + "learning_rate": 2.6287890255348336e-06, + "loss": 0.4746, + "step": 6107 + }, + { + "epoch": 0.2520425847982174, + "grad_norm": 15.044210398807616, + "learning_rate": 2.6286569894703393e-06, + "loss": 0.6368, + "step": 6108 + }, + { + "epoch": 0.2520838491375753, + "grad_norm": 3.311732501196355, + "learning_rate": 2.6285249332452076e-06, + "loss": 0.5383, + "step": 6109 + }, + { + "epoch": 0.2521251134769332, + "grad_norm": 2.806497408600538, + "learning_rate": 2.628392856861797e-06, + "loss": 0.5658, + "step": 6110 + }, + { + "epoch": 0.25216637781629114, + "grad_norm": 2.5501717744381778, + "learning_rate": 2.628260760322468e-06, + "loss": 0.5579, + "step": 6111 + }, + { + "epoch": 0.2522076421556491, + "grad_norm": 6.632511201328691, + "learning_rate": 2.6281286436295785e-06, + "loss": 0.568, + "step": 6112 + }, + { + "epoch": 0.25224890649500703, + "grad_norm": 2.5762574582196236, + "learning_rate": 2.6279965067854896e-06, + "loss": 0.4887, + "step": 6113 + }, + { + "epoch": 0.25229017083436495, + "grad_norm": 4.545657117744262, + "learning_rate": 2.6278643497925616e-06, + "loss": 0.553, + "step": 6114 + }, + { + "epoch": 0.25233143517372286, + "grad_norm": 2.0319310964497133, + "learning_rate": 2.6277321726531544e-06, + "loss": 0.6149, + "step": 6115 + }, + { + "epoch": 0.2523726995130808, + "grad_norm": 17.23690462528683, + "learning_rate": 2.6275999753696296e-06, + "loss": 0.5263, + "step": 6116 + }, + { + "epoch": 0.25241396385243875, + "grad_norm": 3.7725161628112933, + "learning_rate": 2.6274677579443486e-06, + "loss": 0.5555, + "step": 6117 + }, + { + "epoch": 0.25245522819179667, + "grad_norm": 4.736413009585346, + "learning_rate": 2.6273355203796727e-06, + "loss": 0.5681, + "step": 6118 + }, + { + "epoch": 0.2524964925311546, + "grad_norm": 6.1327871035134685, + "learning_rate": 2.627203262677964e-06, + "loss": 0.5567, + "step": 6119 + }, + { + "epoch": 0.2525377568705125, + "grad_norm": 3.0064582916817058, + "learning_rate": 2.6270709848415857e-06, + "loss": 0.5842, + "step": 6120 + }, + { + "epoch": 0.2525790212098704, + "grad_norm": 4.461159174220458, + "learning_rate": 2.6269386868728998e-06, + "loss": 0.5804, + "step": 6121 + }, + { + "epoch": 0.25262028554922833, + "grad_norm": 3.6817897780986937, + "learning_rate": 2.6268063687742696e-06, + "loss": 0.5753, + "step": 6122 + }, + { + "epoch": 0.2526615498885863, + "grad_norm": 4.698242782479887, + "learning_rate": 2.6266740305480598e-06, + "loss": 0.5993, + "step": 6123 + }, + { + "epoch": 0.2527028142279442, + "grad_norm": 2.414977238764456, + "learning_rate": 2.6265416721966325e-06, + "loss": 0.5257, + "step": 6124 + }, + { + "epoch": 0.25274407856730213, + "grad_norm": 9.586559979424237, + "learning_rate": 
2.6264092937223528e-06, + "loss": 0.5754, + "step": 6125 + }, + { + "epoch": 0.25278534290666005, + "grad_norm": 2.44038950091775, + "learning_rate": 2.6262768951275855e-06, + "loss": 0.508, + "step": 6126 + }, + { + "epoch": 0.25282660724601796, + "grad_norm": 1.9288148487410683, + "learning_rate": 2.626144476414695e-06, + "loss": 0.5451, + "step": 6127 + }, + { + "epoch": 0.25286787158537594, + "grad_norm": 6.445089664864366, + "learning_rate": 2.626012037586047e-06, + "loss": 0.5477, + "step": 6128 + }, + { + "epoch": 0.25290913592473385, + "grad_norm": 4.590448426035364, + "learning_rate": 2.6258795786440076e-06, + "loss": 0.5318, + "step": 6129 + }, + { + "epoch": 0.25295040026409177, + "grad_norm": 2.709205252443633, + "learning_rate": 2.6257470995909423e-06, + "loss": 0.6075, + "step": 6130 + }, + { + "epoch": 0.2529916646034497, + "grad_norm": 6.935896208777948, + "learning_rate": 2.625614600429217e-06, + "loss": 0.5214, + "step": 6131 + }, + { + "epoch": 0.2530329289428076, + "grad_norm": 4.277878868504059, + "learning_rate": 2.6254820811611995e-06, + "loss": 0.4987, + "step": 6132 + }, + { + "epoch": 0.25307419328216557, + "grad_norm": 2.4332532924753503, + "learning_rate": 2.6253495417892565e-06, + "loss": 0.5035, + "step": 6133 + }, + { + "epoch": 0.2531154576215235, + "grad_norm": 4.858192889685355, + "learning_rate": 2.6252169823157555e-06, + "loss": 0.5286, + "step": 6134 + }, + { + "epoch": 0.2531567219608814, + "grad_norm": 3.158346717557337, + "learning_rate": 2.6250844027430648e-06, + "loss": 0.5871, + "step": 6135 + }, + { + "epoch": 0.2531979863002393, + "grad_norm": 6.148506484964213, + "learning_rate": 2.6249518030735517e-06, + "loss": 0.5609, + "step": 6136 + }, + { + "epoch": 0.25323925063959724, + "grad_norm": 7.909359878965102, + "learning_rate": 2.624819183309585e-06, + "loss": 0.5281, + "step": 6137 + }, + { + "epoch": 0.2532805149789552, + "grad_norm": 3.397280541820992, + "learning_rate": 2.624686543453534e-06, + "loss": 0.5454, + "step": 6138 + }, + { + "epoch": 0.2533217793183131, + "grad_norm": 3.313918715617488, + "learning_rate": 2.624553883507768e-06, + "loss": 0.4816, + "step": 6139 + }, + { + "epoch": 0.25336304365767104, + "grad_norm": 3.974588954241616, + "learning_rate": 2.6244212034746563e-06, + "loss": 0.5527, + "step": 6140 + }, + { + "epoch": 0.25340430799702895, + "grad_norm": 2.4551505422624267, + "learning_rate": 2.624288503356569e-06, + "loss": 0.54, + "step": 6141 + }, + { + "epoch": 0.25344557233638687, + "grad_norm": 2.4346564490340006, + "learning_rate": 2.624155783155876e-06, + "loss": 0.4905, + "step": 6142 + }, + { + "epoch": 0.25348683667574484, + "grad_norm": 3.654884819410341, + "learning_rate": 2.624023042874949e-06, + "loss": 0.5561, + "step": 6143 + }, + { + "epoch": 0.25352810101510276, + "grad_norm": 17.798350020380894, + "learning_rate": 2.6238902825161587e-06, + "loss": 0.5716, + "step": 6144 + }, + { + "epoch": 0.2535693653544607, + "grad_norm": 3.343474928625972, + "learning_rate": 2.6237575020818763e-06, + "loss": 0.5734, + "step": 6145 + }, + { + "epoch": 0.2536106296938186, + "grad_norm": 2.2045969955878553, + "learning_rate": 2.6236247015744733e-06, + "loss": 0.5508, + "step": 6146 + }, + { + "epoch": 0.2536518940331765, + "grad_norm": 4.162763521315174, + "learning_rate": 2.623491880996323e-06, + "loss": 0.519, + "step": 6147 + }, + { + "epoch": 0.2536931583725345, + "grad_norm": 2.960465168400276, + "learning_rate": 2.623359040349797e-06, + "loss": 0.606, + "step": 6148 + }, + { + "epoch": 0.2537344227118924, + 
"grad_norm": 24.519612388400642, + "learning_rate": 2.623226179637268e-06, + "loss": 0.5499, + "step": 6149 + }, + { + "epoch": 0.2537756870512503, + "grad_norm": 6.580202927384668, + "learning_rate": 2.6230932988611098e-06, + "loss": 0.6138, + "step": 6150 + }, + { + "epoch": 0.2538169513906082, + "grad_norm": 2.8261640895847764, + "learning_rate": 2.622960398023695e-06, + "loss": 0.5937, + "step": 6151 + }, + { + "epoch": 0.25385821572996614, + "grad_norm": 3.1595310135938757, + "learning_rate": 2.622827477127399e-06, + "loss": 0.5595, + "step": 6152 + }, + { + "epoch": 0.2538994800693241, + "grad_norm": 5.212506828820883, + "learning_rate": 2.6226945361745955e-06, + "loss": 0.5722, + "step": 6153 + }, + { + "epoch": 0.25394074440868203, + "grad_norm": 7.133722490896222, + "learning_rate": 2.622561575167659e-06, + "loss": 0.626, + "step": 6154 + }, + { + "epoch": 0.25398200874803994, + "grad_norm": 3.5441430062112493, + "learning_rate": 2.6224285941089643e-06, + "loss": 0.5319, + "step": 6155 + }, + { + "epoch": 0.25402327308739786, + "grad_norm": 3.2918770423956407, + "learning_rate": 2.6222955930008867e-06, + "loss": 0.543, + "step": 6156 + }, + { + "epoch": 0.2540645374267558, + "grad_norm": 2.9202300138422705, + "learning_rate": 2.622162571845803e-06, + "loss": 0.5995, + "step": 6157 + }, + { + "epoch": 0.25410580176611375, + "grad_norm": 3.983941387790638, + "learning_rate": 2.6220295306460877e-06, + "loss": 0.5437, + "step": 6158 + }, + { + "epoch": 0.25414706610547166, + "grad_norm": 10.443367731524964, + "learning_rate": 2.6218964694041186e-06, + "loss": 0.4987, + "step": 6159 + }, + { + "epoch": 0.2541883304448296, + "grad_norm": 3.248882367766884, + "learning_rate": 2.6217633881222724e-06, + "loss": 0.5206, + "step": 6160 + }, + { + "epoch": 0.2542295947841875, + "grad_norm": 4.302927849172253, + "learning_rate": 2.621630286802925e-06, + "loss": 0.5581, + "step": 6161 + }, + { + "epoch": 0.2542708591235454, + "grad_norm": 2.7146166413577446, + "learning_rate": 2.6214971654484552e-06, + "loss": 0.5789, + "step": 6162 + }, + { + "epoch": 0.2543121234629034, + "grad_norm": 5.123224191088569, + "learning_rate": 2.6213640240612404e-06, + "loss": 0.4769, + "step": 6163 + }, + { + "epoch": 0.2543533878022613, + "grad_norm": 11.829769950344643, + "learning_rate": 2.621230862643659e-06, + "loss": 0.579, + "step": 6164 + }, + { + "epoch": 0.2543946521416192, + "grad_norm": 3.2200687089370343, + "learning_rate": 2.6210976811980894e-06, + "loss": 0.5295, + "step": 6165 + }, + { + "epoch": 0.25443591648097713, + "grad_norm": 2.8405010820358765, + "learning_rate": 2.620964479726911e-06, + "loss": 0.5661, + "step": 6166 + }, + { + "epoch": 0.25447718082033505, + "grad_norm": 4.685750072709982, + "learning_rate": 2.620831258232502e-06, + "loss": 0.5636, + "step": 6167 + }, + { + "epoch": 0.254518445159693, + "grad_norm": 4.141896872244165, + "learning_rate": 2.620698016717244e-06, + "loss": 0.5462, + "step": 6168 + }, + { + "epoch": 0.25455970949905093, + "grad_norm": 11.656835153129459, + "learning_rate": 2.620564755183515e-06, + "loss": 0.5725, + "step": 6169 + }, + { + "epoch": 0.25460097383840885, + "grad_norm": 2.9394500519588824, + "learning_rate": 2.6204314736336963e-06, + "loss": 0.5674, + "step": 6170 + }, + { + "epoch": 0.25464223817776677, + "grad_norm": 3.1076104312856865, + "learning_rate": 2.620298172070169e-06, + "loss": 0.5513, + "step": 6171 + }, + { + "epoch": 0.2546835025171247, + "grad_norm": 7.041691186145762, + "learning_rate": 2.620164850495313e-06, + "loss": 0.5347, + 
"step": 6172 + }, + { + "epoch": 0.25472476685648265, + "grad_norm": 2.3575580179302107, + "learning_rate": 2.620031508911511e-06, + "loss": 0.5646, + "step": 6173 + }, + { + "epoch": 0.25476603119584057, + "grad_norm": 2.900039399682943, + "learning_rate": 2.6198981473211448e-06, + "loss": 0.6016, + "step": 6174 + }, + { + "epoch": 0.2548072955351985, + "grad_norm": 2.5719364103331763, + "learning_rate": 2.6197647657265957e-06, + "loss": 0.4916, + "step": 6175 + }, + { + "epoch": 0.2548485598745564, + "grad_norm": 3.6471635250969476, + "learning_rate": 2.619631364130247e-06, + "loss": 0.606, + "step": 6176 + }, + { + "epoch": 0.2548898242139143, + "grad_norm": 2.34938523603495, + "learning_rate": 2.6194979425344808e-06, + "loss": 0.6077, + "step": 6177 + }, + { + "epoch": 0.2549310885532723, + "grad_norm": 2.837544777674979, + "learning_rate": 2.619364500941681e-06, + "loss": 0.5925, + "step": 6178 + }, + { + "epoch": 0.2549723528926302, + "grad_norm": 2.7572931455340512, + "learning_rate": 2.6192310393542308e-06, + "loss": 0.5641, + "step": 6179 + }, + { + "epoch": 0.2550136172319881, + "grad_norm": 11.346547676225235, + "learning_rate": 2.619097557774515e-06, + "loss": 0.5368, + "step": 6180 + }, + { + "epoch": 0.25505488157134604, + "grad_norm": 4.815309934558337, + "learning_rate": 2.6189640562049162e-06, + "loss": 0.5791, + "step": 6181 + }, + { + "epoch": 0.25509614591070395, + "grad_norm": 4.1833151348786926, + "learning_rate": 2.6188305346478208e-06, + "loss": 0.6174, + "step": 6182 + }, + { + "epoch": 0.25513741025006187, + "grad_norm": 16.3647615238241, + "learning_rate": 2.618696993105613e-06, + "loss": 0.5183, + "step": 6183 + }, + { + "epoch": 0.25517867458941984, + "grad_norm": 3.839759756221715, + "learning_rate": 2.6185634315806786e-06, + "loss": 0.5201, + "step": 6184 + }, + { + "epoch": 0.25521993892877776, + "grad_norm": 4.47855916891213, + "learning_rate": 2.6184298500754027e-06, + "loss": 0.5289, + "step": 6185 + }, + { + "epoch": 0.2552612032681357, + "grad_norm": 3.0946158708634335, + "learning_rate": 2.6182962485921725e-06, + "loss": 0.5628, + "step": 6186 + }, + { + "epoch": 0.2553024676074936, + "grad_norm": 2.174790467232246, + "learning_rate": 2.6181626271333732e-06, + "loss": 0.531, + "step": 6187 + }, + { + "epoch": 0.2553437319468515, + "grad_norm": 5.9028236229186195, + "learning_rate": 2.618028985701392e-06, + "loss": 0.5593, + "step": 6188 + }, + { + "epoch": 0.2553849962862095, + "grad_norm": 4.615834587552535, + "learning_rate": 2.6178953242986167e-06, + "loss": 0.5739, + "step": 6189 + }, + { + "epoch": 0.2554262606255674, + "grad_norm": 2.9224778190766463, + "learning_rate": 2.617761642927434e-06, + "loss": 0.5512, + "step": 6190 + }, + { + "epoch": 0.2554675249649253, + "grad_norm": 4.178827451022909, + "learning_rate": 2.6176279415902326e-06, + "loss": 0.5678, + "step": 6191 + }, + { + "epoch": 0.2555087893042832, + "grad_norm": 8.911261449922472, + "learning_rate": 2.6174942202894e-06, + "loss": 0.5695, + "step": 6192 + }, + { + "epoch": 0.25555005364364114, + "grad_norm": 4.739131116093358, + "learning_rate": 2.6173604790273253e-06, + "loss": 0.5811, + "step": 6193 + }, + { + "epoch": 0.2555913179829991, + "grad_norm": 2.6430016430826524, + "learning_rate": 2.6172267178063965e-06, + "loss": 0.534, + "step": 6194 + }, + { + "epoch": 0.255632582322357, + "grad_norm": 3.8850589524575434, + "learning_rate": 2.6170929366290042e-06, + "loss": 0.5562, + "step": 6195 + }, + { + "epoch": 0.25567384666171494, + "grad_norm": 6.005919544815513, + 
"learning_rate": 2.6169591354975376e-06, + "loss": 0.586, + "step": 6196 + }, + { + "epoch": 0.25571511100107286, + "grad_norm": 3.2682977715381045, + "learning_rate": 2.6168253144143865e-06, + "loss": 0.5035, + "step": 6197 + }, + { + "epoch": 0.2557563753404308, + "grad_norm": 3.766607325937166, + "learning_rate": 2.6166914733819413e-06, + "loss": 0.5602, + "step": 6198 + }, + { + "epoch": 0.25579763967978875, + "grad_norm": 2.649647548955523, + "learning_rate": 2.616557612402593e-06, + "loss": 0.5137, + "step": 6199 + }, + { + "epoch": 0.25583890401914666, + "grad_norm": 2.4219065118003504, + "learning_rate": 2.6164237314787323e-06, + "loss": 0.5104, + "step": 6200 + }, + { + "epoch": 0.2558801683585046, + "grad_norm": 2.13918081966217, + "learning_rate": 2.6162898306127513e-06, + "loss": 0.535, + "step": 6201 + }, + { + "epoch": 0.2559214326978625, + "grad_norm": 7.231893528987943, + "learning_rate": 2.6161559098070412e-06, + "loss": 0.549, + "step": 6202 + }, + { + "epoch": 0.2559626970372204, + "grad_norm": 3.7062475382318847, + "learning_rate": 2.616021969063994e-06, + "loss": 0.6043, + "step": 6203 + }, + { + "epoch": 0.2560039613765784, + "grad_norm": 3.982302865288918, + "learning_rate": 2.6158880083860034e-06, + "loss": 0.5933, + "step": 6204 + }, + { + "epoch": 0.2560452257159363, + "grad_norm": 2.6226624336020623, + "learning_rate": 2.6157540277754606e-06, + "loss": 0.5143, + "step": 6205 + }, + { + "epoch": 0.2560864900552942, + "grad_norm": 10.642130215874133, + "learning_rate": 2.61562002723476e-06, + "loss": 0.6161, + "step": 6206 + }, + { + "epoch": 0.25612775439465213, + "grad_norm": 2.9083138680364873, + "learning_rate": 2.615486006766295e-06, + "loss": 0.5201, + "step": 6207 + }, + { + "epoch": 0.25616901873401005, + "grad_norm": 2.4897277262256208, + "learning_rate": 2.6153519663724595e-06, + "loss": 0.5606, + "step": 6208 + }, + { + "epoch": 0.256210283073368, + "grad_norm": 3.276853328814789, + "learning_rate": 2.6152179060556474e-06, + "loss": 0.5509, + "step": 6209 + }, + { + "epoch": 0.25625154741272593, + "grad_norm": 3.2007475930486247, + "learning_rate": 2.6150838258182536e-06, + "loss": 0.5256, + "step": 6210 + }, + { + "epoch": 0.25629281175208385, + "grad_norm": 6.2588251830199155, + "learning_rate": 2.6149497256626733e-06, + "loss": 0.5279, + "step": 6211 + }, + { + "epoch": 0.25633407609144176, + "grad_norm": 2.9679915842049325, + "learning_rate": 2.6148156055913015e-06, + "loss": 0.5393, + "step": 6212 + }, + { + "epoch": 0.2563753404307997, + "grad_norm": 2.135076496639015, + "learning_rate": 2.6146814656065346e-06, + "loss": 0.5598, + "step": 6213 + }, + { + "epoch": 0.25641660477015765, + "grad_norm": 2.559441231382602, + "learning_rate": 2.614547305710768e-06, + "loss": 0.514, + "step": 6214 + }, + { + "epoch": 0.25645786910951557, + "grad_norm": 7.982351193939308, + "learning_rate": 2.6144131259063985e-06, + "loss": 0.5499, + "step": 6215 + }, + { + "epoch": 0.2564991334488735, + "grad_norm": 3.600853806766728, + "learning_rate": 2.6142789261958227e-06, + "loss": 0.5382, + "step": 6216 + }, + { + "epoch": 0.2565403977882314, + "grad_norm": 4.669524996353418, + "learning_rate": 2.6141447065814376e-06, + "loss": 0.5295, + "step": 6217 + }, + { + "epoch": 0.2565816621275893, + "grad_norm": 3.754317542879369, + "learning_rate": 2.6140104670656407e-06, + "loss": 0.565, + "step": 6218 + }, + { + "epoch": 0.2566229264669473, + "grad_norm": 2.9535147249892835, + "learning_rate": 2.6138762076508303e-06, + "loss": 0.5573, + "step": 6219 + }, + { + "epoch": 
0.2566641908063052, + "grad_norm": 9.095035860666771, + "learning_rate": 2.6137419283394045e-06, + "loss": 0.5511, + "step": 6220 + }, + { + "epoch": 0.2567054551456631, + "grad_norm": 3.325425835263813, + "learning_rate": 2.613607629133761e-06, + "loss": 0.4769, + "step": 6221 + }, + { + "epoch": 0.25674671948502104, + "grad_norm": 31.9103891601402, + "learning_rate": 2.6134733100363e-06, + "loss": 0.5029, + "step": 6222 + }, + { + "epoch": 0.25678798382437895, + "grad_norm": 3.134773754922942, + "learning_rate": 2.61333897104942e-06, + "loss": 0.5857, + "step": 6223 + }, + { + "epoch": 0.2568292481637369, + "grad_norm": 2.510548345930358, + "learning_rate": 2.6132046121755213e-06, + "loss": 0.5453, + "step": 6224 + }, + { + "epoch": 0.25687051250309484, + "grad_norm": 4.766540987422048, + "learning_rate": 2.6130702334170027e-06, + "loss": 0.5374, + "step": 6225 + }, + { + "epoch": 0.25691177684245275, + "grad_norm": 3.6621758289427504, + "learning_rate": 2.612935834776266e-06, + "loss": 0.5528, + "step": 6226 + }, + { + "epoch": 0.25695304118181067, + "grad_norm": 3.4043829648413175, + "learning_rate": 2.6128014162557106e-06, + "loss": 0.5836, + "step": 6227 + }, + { + "epoch": 0.2569943055211686, + "grad_norm": 2.7827455253689837, + "learning_rate": 2.6126669778577385e-06, + "loss": 0.5292, + "step": 6228 + }, + { + "epoch": 0.25703556986052656, + "grad_norm": 2.6032886633645322, + "learning_rate": 2.6125325195847503e-06, + "loss": 0.5781, + "step": 6229 + }, + { + "epoch": 0.2570768341998845, + "grad_norm": 3.7379097847898786, + "learning_rate": 2.6123980414391486e-06, + "loss": 0.4975, + "step": 6230 + }, + { + "epoch": 0.2571180985392424, + "grad_norm": 3.93380720994128, + "learning_rate": 2.6122635434233347e-06, + "loss": 0.6048, + "step": 6231 + }, + { + "epoch": 0.2571593628786003, + "grad_norm": 3.6128963917935653, + "learning_rate": 2.6121290255397117e-06, + "loss": 0.5066, + "step": 6232 + }, + { + "epoch": 0.2572006272179582, + "grad_norm": 2.461027088514583, + "learning_rate": 2.611994487790682e-06, + "loss": 0.5251, + "step": 6233 + }, + { + "epoch": 0.2572418915573162, + "grad_norm": 4.793090391896602, + "learning_rate": 2.611859930178649e-06, + "loss": 0.5608, + "step": 6234 + }, + { + "epoch": 0.2572831558966741, + "grad_norm": 2.3964885911990907, + "learning_rate": 2.6117253527060163e-06, + "loss": 0.5189, + "step": 6235 + }, + { + "epoch": 0.257324420236032, + "grad_norm": 7.349619309316555, + "learning_rate": 2.611590755375188e-06, + "loss": 0.539, + "step": 6236 + }, + { + "epoch": 0.25736568457538994, + "grad_norm": 2.8390531778223465, + "learning_rate": 2.611456138188567e-06, + "loss": 0.5521, + "step": 6237 + }, + { + "epoch": 0.25740694891474786, + "grad_norm": 4.809549057304099, + "learning_rate": 2.6113215011485597e-06, + "loss": 0.5917, + "step": 6238 + }, + { + "epoch": 0.25744821325410583, + "grad_norm": 2.374327383866422, + "learning_rate": 2.6111868442575707e-06, + "loss": 0.5311, + "step": 6239 + }, + { + "epoch": 0.25748947759346374, + "grad_norm": 2.150103503056882, + "learning_rate": 2.6110521675180044e-06, + "loss": 0.5114, + "step": 6240 + }, + { + "epoch": 0.25753074193282166, + "grad_norm": 3.340655992514767, + "learning_rate": 2.610917470932267e-06, + "loss": 0.5314, + "step": 6241 + }, + { + "epoch": 0.2575720062721796, + "grad_norm": 4.446192096242744, + "learning_rate": 2.610782754502764e-06, + "loss": 0.5972, + "step": 6242 + }, + { + "epoch": 0.2576132706115375, + "grad_norm": 3.3298223904708157, + "learning_rate": 2.610648018231903e-06, + 
"loss": 0.6258, + "step": 6243 + }, + { + "epoch": 0.2576545349508954, + "grad_norm": 3.3519342509561025, + "learning_rate": 2.6105132621220895e-06, + "loss": 0.5033, + "step": 6244 + }, + { + "epoch": 0.2576957992902534, + "grad_norm": 10.971595309624798, + "learning_rate": 2.610378486175731e-06, + "loss": 0.5669, + "step": 6245 + }, + { + "epoch": 0.2577370636296113, + "grad_norm": 6.938659751471953, + "learning_rate": 2.610243690395235e-06, + "loss": 0.5156, + "step": 6246 + }, + { + "epoch": 0.2577783279689692, + "grad_norm": 3.074212861949869, + "learning_rate": 2.6101088747830104e-06, + "loss": 0.5603, + "step": 6247 + }, + { + "epoch": 0.25781959230832713, + "grad_norm": 5.07081522671309, + "learning_rate": 2.6099740393414626e-06, + "loss": 0.5175, + "step": 6248 + }, + { + "epoch": 0.25786085664768504, + "grad_norm": 3.5378524695365776, + "learning_rate": 2.6098391840730026e-06, + "loss": 0.5942, + "step": 6249 + }, + { + "epoch": 0.257902120987043, + "grad_norm": 3.848472364661738, + "learning_rate": 2.6097043089800384e-06, + "loss": 0.5553, + "step": 6250 + }, + { + "epoch": 0.25794338532640093, + "grad_norm": 4.651463814555523, + "learning_rate": 2.609569414064979e-06, + "loss": 0.5882, + "step": 6251 + }, + { + "epoch": 0.25798464966575885, + "grad_norm": 3.4633989957551843, + "learning_rate": 2.6094344993302343e-06, + "loss": 0.5892, + "step": 6252 + }, + { + "epoch": 0.25802591400511676, + "grad_norm": 4.215839783922422, + "learning_rate": 2.609299564778214e-06, + "loss": 0.5423, + "step": 6253 + }, + { + "epoch": 0.2580671783444747, + "grad_norm": 5.139860277909309, + "learning_rate": 2.609164610411328e-06, + "loss": 0.5112, + "step": 6254 + }, + { + "epoch": 0.25810844268383265, + "grad_norm": 8.565412779557935, + "learning_rate": 2.609029636231988e-06, + "loss": 0.5448, + "step": 6255 + }, + { + "epoch": 0.25814970702319057, + "grad_norm": 3.460182552601457, + "learning_rate": 2.608894642242604e-06, + "loss": 0.5926, + "step": 6256 + }, + { + "epoch": 0.2581909713625485, + "grad_norm": 4.182951995915998, + "learning_rate": 2.6087596284455876e-06, + "loss": 0.5316, + "step": 6257 + }, + { + "epoch": 0.2582322357019064, + "grad_norm": 2.9430206587907786, + "learning_rate": 2.6086245948433513e-06, + "loss": 0.5137, + "step": 6258 + }, + { + "epoch": 0.2582735000412643, + "grad_norm": 3.235718189576313, + "learning_rate": 2.6084895414383054e-06, + "loss": 0.498, + "step": 6259 + }, + { + "epoch": 0.2583147643806223, + "grad_norm": 6.440504006562299, + "learning_rate": 2.6083544682328632e-06, + "loss": 0.5094, + "step": 6260 + }, + { + "epoch": 0.2583560287199802, + "grad_norm": 3.261122087863846, + "learning_rate": 2.6082193752294383e-06, + "loss": 0.5607, + "step": 6261 + }, + { + "epoch": 0.2583972930593381, + "grad_norm": 2.8055350223876045, + "learning_rate": 2.6080842624304426e-06, + "loss": 0.5704, + "step": 6262 + }, + { + "epoch": 0.25843855739869603, + "grad_norm": 2.6862570337138694, + "learning_rate": 2.60794912983829e-06, + "loss": 0.5677, + "step": 6263 + }, + { + "epoch": 0.25847982173805395, + "grad_norm": 2.287199093000013, + "learning_rate": 2.6078139774553946e-06, + "loss": 0.5217, + "step": 6264 + }, + { + "epoch": 0.2585210860774119, + "grad_norm": 3.0967947927867763, + "learning_rate": 2.6076788052841695e-06, + "loss": 0.5456, + "step": 6265 + }, + { + "epoch": 0.25856235041676984, + "grad_norm": 2.8414706155343734, + "learning_rate": 2.6075436133270307e-06, + "loss": 0.4621, + "step": 6266 + }, + { + "epoch": 0.25860361475612775, + "grad_norm": 
4.66699528557207, + "learning_rate": 2.607408401586392e-06, + "loss": 0.4959, + "step": 6267 + }, + { + "epoch": 0.25864487909548567, + "grad_norm": 16.882993188607745, + "learning_rate": 2.6072731700646687e-06, + "loss": 0.5513, + "step": 6268 + }, + { + "epoch": 0.2586861434348436, + "grad_norm": 42.71081804704625, + "learning_rate": 2.607137918764277e-06, + "loss": 0.598, + "step": 6269 + }, + { + "epoch": 0.25872740777420156, + "grad_norm": 2.7369386174817834, + "learning_rate": 2.607002647687632e-06, + "loss": 0.6344, + "step": 6270 + }, + { + "epoch": 0.2587686721135595, + "grad_norm": 3.565720296307447, + "learning_rate": 2.606867356837151e-06, + "loss": 0.5937, + "step": 6271 + }, + { + "epoch": 0.2588099364529174, + "grad_norm": 1.7369143464013723, + "learning_rate": 2.6067320462152497e-06, + "loss": 0.5534, + "step": 6272 + }, + { + "epoch": 0.2588512007922753, + "grad_norm": 3.2461677881926247, + "learning_rate": 2.6065967158243454e-06, + "loss": 0.5203, + "step": 6273 + }, + { + "epoch": 0.2588924651316332, + "grad_norm": 2.4930584985198188, + "learning_rate": 2.6064613656668554e-06, + "loss": 0.5606, + "step": 6274 + }, + { + "epoch": 0.2589337294709912, + "grad_norm": 2.801878332785259, + "learning_rate": 2.606325995745198e-06, + "loss": 0.551, + "step": 6275 + }, + { + "epoch": 0.2589749938103491, + "grad_norm": 3.6899226102002527, + "learning_rate": 2.6061906060617904e-06, + "loss": 0.5544, + "step": 6276 + }, + { + "epoch": 0.259016258149707, + "grad_norm": 4.937238234351172, + "learning_rate": 2.606055196619051e-06, + "loss": 0.5319, + "step": 6277 + }, + { + "epoch": 0.25905752248906494, + "grad_norm": 3.7640305491755752, + "learning_rate": 2.6059197674193996e-06, + "loss": 0.5775, + "step": 6278 + }, + { + "epoch": 0.25909878682842286, + "grad_norm": 5.002822502273103, + "learning_rate": 2.6057843184652538e-06, + "loss": 0.5542, + "step": 6279 + }, + { + "epoch": 0.2591400511677808, + "grad_norm": 2.481992232541116, + "learning_rate": 2.605648849759034e-06, + "loss": 0.5149, + "step": 6280 + }, + { + "epoch": 0.25918131550713874, + "grad_norm": 4.179736999205696, + "learning_rate": 2.6055133613031602e-06, + "loss": 0.6224, + "step": 6281 + }, + { + "epoch": 0.25922257984649666, + "grad_norm": 2.9693947328981958, + "learning_rate": 2.6053778531000516e-06, + "loss": 0.5865, + "step": 6282 + }, + { + "epoch": 0.2592638441858546, + "grad_norm": 2.7721885262233514, + "learning_rate": 2.60524232515213e-06, + "loss": 0.5815, + "step": 6283 + }, + { + "epoch": 0.2593051085252125, + "grad_norm": 2.6262467408214425, + "learning_rate": 2.605106777461815e-06, + "loss": 0.5371, + "step": 6284 + }, + { + "epoch": 0.25934637286457046, + "grad_norm": 14.540149367194918, + "learning_rate": 2.6049712100315283e-06, + "loss": 0.5296, + "step": 6285 + }, + { + "epoch": 0.2593876372039284, + "grad_norm": 2.1664161739446635, + "learning_rate": 2.6048356228636923e-06, + "loss": 0.532, + "step": 6286 + }, + { + "epoch": 0.2594289015432863, + "grad_norm": 4.457871298926561, + "learning_rate": 2.6047000159607276e-06, + "loss": 0.53, + "step": 6287 + }, + { + "epoch": 0.2594701658826442, + "grad_norm": 9.031866322824673, + "learning_rate": 2.6045643893250573e-06, + "loss": 0.5522, + "step": 6288 + }, + { + "epoch": 0.2595114302220021, + "grad_norm": 2.326895847345305, + "learning_rate": 2.6044287429591037e-06, + "loss": 0.4946, + "step": 6289 + }, + { + "epoch": 0.2595526945613601, + "grad_norm": 2.5783739711889337, + "learning_rate": 2.60429307686529e-06, + "loss": 0.5385, + "step": 6290 + }, + { 
+ "epoch": 0.259593958900718, + "grad_norm": 2.448475414131415, + "learning_rate": 2.6041573910460397e-06, + "loss": 0.5363, + "step": 6291 + }, + { + "epoch": 0.25963522324007593, + "grad_norm": 3.5605782042015424, + "learning_rate": 2.6040216855037756e-06, + "loss": 0.5652, + "step": 6292 + }, + { + "epoch": 0.25967648757943385, + "grad_norm": 3.236107931469498, + "learning_rate": 2.603885960240923e-06, + "loss": 0.5375, + "step": 6293 + }, + { + "epoch": 0.25971775191879176, + "grad_norm": 3.0351453667054344, + "learning_rate": 2.603750215259905e-06, + "loss": 0.5503, + "step": 6294 + }, + { + "epoch": 0.25975901625814973, + "grad_norm": 7.101825638332406, + "learning_rate": 2.6036144505631474e-06, + "loss": 0.5452, + "step": 6295 + }, + { + "epoch": 0.25980028059750765, + "grad_norm": 9.908488071431108, + "learning_rate": 2.6034786661530746e-06, + "loss": 0.5985, + "step": 6296 + }, + { + "epoch": 0.25984154493686556, + "grad_norm": 4.297035097275456, + "learning_rate": 2.6033428620321127e-06, + "loss": 0.5282, + "step": 6297 + }, + { + "epoch": 0.2598828092762235, + "grad_norm": 3.408218122938126, + "learning_rate": 2.603207038202687e-06, + "loss": 0.5816, + "step": 6298 + }, + { + "epoch": 0.2599240736155814, + "grad_norm": 3.867716713358346, + "learning_rate": 2.603071194667224e-06, + "loss": 0.5765, + "step": 6299 + }, + { + "epoch": 0.25996533795493937, + "grad_norm": 3.8165102918853457, + "learning_rate": 2.6029353314281498e-06, + "loss": 0.5884, + "step": 6300 + }, + { + "epoch": 0.2600066022942973, + "grad_norm": 2.658103936924869, + "learning_rate": 2.6027994484878914e-06, + "loss": 0.483, + "step": 6301 + }, + { + "epoch": 0.2600478666336552, + "grad_norm": 2.809934169386696, + "learning_rate": 2.602663545848876e-06, + "loss": 0.5494, + "step": 6302 + }, + { + "epoch": 0.2600891309730131, + "grad_norm": 4.109879705954771, + "learning_rate": 2.6025276235135315e-06, + "loss": 0.5076, + "step": 6303 + }, + { + "epoch": 0.26013039531237103, + "grad_norm": 4.504446678178345, + "learning_rate": 2.6023916814842856e-06, + "loss": 0.584, + "step": 6304 + }, + { + "epoch": 0.26017165965172895, + "grad_norm": 7.085311731941595, + "learning_rate": 2.6022557197635664e-06, + "loss": 0.5854, + "step": 6305 + }, + { + "epoch": 0.2602129239910869, + "grad_norm": 2.6842685481332897, + "learning_rate": 2.6021197383538026e-06, + "loss": 0.5586, + "step": 6306 + }, + { + "epoch": 0.26025418833044484, + "grad_norm": 4.6016514790085195, + "learning_rate": 2.6019837372574226e-06, + "loss": 0.561, + "step": 6307 + }, + { + "epoch": 0.26029545266980275, + "grad_norm": 1.8177253186447706, + "learning_rate": 2.601847716476857e-06, + "loss": 0.5529, + "step": 6308 + }, + { + "epoch": 0.26033671700916067, + "grad_norm": 2.4681352546144044, + "learning_rate": 2.6017116760145347e-06, + "loss": 0.6035, + "step": 6309 + }, + { + "epoch": 0.2603779813485186, + "grad_norm": 2.4593630573105245, + "learning_rate": 2.601575615872886e-06, + "loss": 0.5275, + "step": 6310 + }, + { + "epoch": 0.26041924568787655, + "grad_norm": 7.592770009720129, + "learning_rate": 2.60143953605434e-06, + "loss": 0.5672, + "step": 6311 + }, + { + "epoch": 0.26046051002723447, + "grad_norm": 2.7778815568627504, + "learning_rate": 2.6013034365613297e-06, + "loss": 0.569, + "step": 6312 + }, + { + "epoch": 0.2605017743665924, + "grad_norm": 10.189116634546076, + "learning_rate": 2.6011673173962845e-06, + "loss": 0.5382, + "step": 6313 + }, + { + "epoch": 0.2605430387059503, + "grad_norm": 10.252851681867, + "learning_rate": 
2.6010311785616366e-06, + "loss": 0.5295, + "step": 6314 + }, + { + "epoch": 0.2605843030453082, + "grad_norm": 3.6473225683926396, + "learning_rate": 2.600895020059817e-06, + "loss": 0.5547, + "step": 6315 + }, + { + "epoch": 0.2606255673846662, + "grad_norm": 2.505555686561827, + "learning_rate": 2.600758841893259e-06, + "loss": 0.5106, + "step": 6316 + }, + { + "epoch": 0.2606668317240241, + "grad_norm": 3.2377419718987386, + "learning_rate": 2.6006226440643945e-06, + "loss": 0.6004, + "step": 6317 + }, + { + "epoch": 0.260708096063382, + "grad_norm": 3.34299586850812, + "learning_rate": 2.6004864265756555e-06, + "loss": 0.5192, + "step": 6318 + }, + { + "epoch": 0.26074936040273994, + "grad_norm": 4.726743087465189, + "learning_rate": 2.600350189429476e-06, + "loss": 0.6176, + "step": 6319 + }, + { + "epoch": 0.26079062474209785, + "grad_norm": 3.588191951930815, + "learning_rate": 2.60021393262829e-06, + "loss": 0.6289, + "step": 6320 + }, + { + "epoch": 0.2608318890814558, + "grad_norm": 4.638768682513202, + "learning_rate": 2.6000776561745304e-06, + "loss": 0.4662, + "step": 6321 + }, + { + "epoch": 0.26087315342081374, + "grad_norm": 2.7625574025806707, + "learning_rate": 2.599941360070632e-06, + "loss": 0.5537, + "step": 6322 + }, + { + "epoch": 0.26091441776017166, + "grad_norm": 3.5985045725726135, + "learning_rate": 2.599805044319029e-06, + "loss": 0.562, + "step": 6323 + }, + { + "epoch": 0.2609556820995296, + "grad_norm": 2.030140827485166, + "learning_rate": 2.5996687089221566e-06, + "loss": 0.5558, + "step": 6324 + }, + { + "epoch": 0.2609969464388875, + "grad_norm": 3.2829667004456176, + "learning_rate": 2.5995323538824506e-06, + "loss": 0.4846, + "step": 6325 + }, + { + "epoch": 0.26103821077824546, + "grad_norm": 2.560863049134592, + "learning_rate": 2.599395979202346e-06, + "loss": 0.5878, + "step": 6326 + }, + { + "epoch": 0.2610794751176034, + "grad_norm": 3.499435256215389, + "learning_rate": 2.599259584884279e-06, + "loss": 0.5974, + "step": 6327 + }, + { + "epoch": 0.2611207394569613, + "grad_norm": 4.277868818758554, + "learning_rate": 2.5991231709306857e-06, + "loss": 0.517, + "step": 6328 + }, + { + "epoch": 0.2611620037963192, + "grad_norm": 3.1981537427385947, + "learning_rate": 2.598986737344003e-06, + "loss": 0.5684, + "step": 6329 + }, + { + "epoch": 0.2612032681356771, + "grad_norm": 2.143295101109126, + "learning_rate": 2.598850284126668e-06, + "loss": 0.5263, + "step": 6330 + }, + { + "epoch": 0.2612445324750351, + "grad_norm": 2.065433925735599, + "learning_rate": 2.5987138112811184e-06, + "loss": 0.5739, + "step": 6331 + }, + { + "epoch": 0.261285796814393, + "grad_norm": 5.742962787413454, + "learning_rate": 2.598577318809791e-06, + "loss": 0.5729, + "step": 6332 + }, + { + "epoch": 0.26132706115375093, + "grad_norm": 5.791916894788421, + "learning_rate": 2.5984408067151245e-06, + "loss": 0.5359, + "step": 6333 + }, + { + "epoch": 0.26136832549310884, + "grad_norm": 2.7388579799110633, + "learning_rate": 2.5983042749995575e-06, + "loss": 0.5108, + "step": 6334 + }, + { + "epoch": 0.26140958983246676, + "grad_norm": 19.252202471043308, + "learning_rate": 2.5981677236655285e-06, + "loss": 0.536, + "step": 6335 + }, + { + "epoch": 0.26145085417182473, + "grad_norm": 2.4430846677005746, + "learning_rate": 2.598031152715477e-06, + "loss": 0.544, + "step": 6336 + }, + { + "epoch": 0.26149211851118265, + "grad_norm": 3.592653023896531, + "learning_rate": 2.5978945621518418e-06, + "loss": 0.5457, + "step": 6337 + }, + { + "epoch": 0.26153338285054056, + 
"grad_norm": 2.4019316056268876, + "learning_rate": 2.597757951977064e-06, + "loss": 0.5255, + "step": 6338 + }, + { + "epoch": 0.2615746471898985, + "grad_norm": 2.7454814510195154, + "learning_rate": 2.5976213221935826e-06, + "loss": 0.5542, + "step": 6339 + }, + { + "epoch": 0.2616159115292564, + "grad_norm": 4.327690889077297, + "learning_rate": 2.597484672803838e-06, + "loss": 0.5383, + "step": 6340 + }, + { + "epoch": 0.26165717586861437, + "grad_norm": 4.409308119358626, + "learning_rate": 2.5973480038102724e-06, + "loss": 0.5113, + "step": 6341 + }, + { + "epoch": 0.2616984402079723, + "grad_norm": 2.9809970648513118, + "learning_rate": 2.597211315215326e-06, + "loss": 0.5538, + "step": 6342 + }, + { + "epoch": 0.2617397045473302, + "grad_norm": 3.652801481177248, + "learning_rate": 2.5970746070214405e-06, + "loss": 0.5328, + "step": 6343 + }, + { + "epoch": 0.2617809688866881, + "grad_norm": 3.945760321518412, + "learning_rate": 2.5969378792310587e-06, + "loss": 0.5579, + "step": 6344 + }, + { + "epoch": 0.26182223322604603, + "grad_norm": 9.324454213691148, + "learning_rate": 2.5968011318466215e-06, + "loss": 0.5946, + "step": 6345 + }, + { + "epoch": 0.261863497565404, + "grad_norm": 2.9068817806807123, + "learning_rate": 2.596664364870573e-06, + "loss": 0.6042, + "step": 6346 + }, + { + "epoch": 0.2619047619047619, + "grad_norm": 2.94681159080975, + "learning_rate": 2.5965275783053553e-06, + "loss": 0.5203, + "step": 6347 + }, + { + "epoch": 0.26194602624411983, + "grad_norm": 4.2352145910221735, + "learning_rate": 2.596390772153412e-06, + "loss": 0.5742, + "step": 6348 + }, + { + "epoch": 0.26198729058347775, + "grad_norm": 2.3756429339796474, + "learning_rate": 2.5962539464171862e-06, + "loss": 0.5388, + "step": 6349 + }, + { + "epoch": 0.26202855492283567, + "grad_norm": 2.831170136191465, + "learning_rate": 2.596117101099123e-06, + "loss": 0.5863, + "step": 6350 + }, + { + "epoch": 0.26206981926219364, + "grad_norm": 4.421795335522185, + "learning_rate": 2.595980236201666e-06, + "loss": 0.4912, + "step": 6351 + }, + { + "epoch": 0.26211108360155155, + "grad_norm": 2.5991262315340298, + "learning_rate": 2.595843351727261e-06, + "loss": 0.5403, + "step": 6352 + }, + { + "epoch": 0.26215234794090947, + "grad_norm": 18.40074413808609, + "learning_rate": 2.5957064476783516e-06, + "loss": 0.5098, + "step": 6353 + }, + { + "epoch": 0.2621936122802674, + "grad_norm": 3.371151614440757, + "learning_rate": 2.5955695240573845e-06, + "loss": 0.5708, + "step": 6354 + }, + { + "epoch": 0.2622348766196253, + "grad_norm": 13.711827699142535, + "learning_rate": 2.5954325808668046e-06, + "loss": 0.549, + "step": 6355 + }, + { + "epoch": 0.2622761409589833, + "grad_norm": 2.6305134146546534, + "learning_rate": 2.5952956181090584e-06, + "loss": 0.5185, + "step": 6356 + }, + { + "epoch": 0.2623174052983412, + "grad_norm": 7.414838223133679, + "learning_rate": 2.595158635786593e-06, + "loss": 0.5531, + "step": 6357 + }, + { + "epoch": 0.2623586696376991, + "grad_norm": 2.585882596008401, + "learning_rate": 2.595021633901854e-06, + "loss": 0.5101, + "step": 6358 + }, + { + "epoch": 0.262399933977057, + "grad_norm": 2.85747437509442, + "learning_rate": 2.59488461245729e-06, + "loss": 0.5893, + "step": 6359 + }, + { + "epoch": 0.26244119831641494, + "grad_norm": 2.7463997057755796, + "learning_rate": 2.594747571455347e-06, + "loss": 0.6024, + "step": 6360 + }, + { + "epoch": 0.2624824626557729, + "grad_norm": 3.050959789245624, + "learning_rate": 2.5946105108984744e-06, + "loss": 0.5889, + "step": 
6361 + }, + { + "epoch": 0.2625237269951308, + "grad_norm": 4.6891849455084325, + "learning_rate": 2.59447343078912e-06, + "loss": 0.5059, + "step": 6362 + }, + { + "epoch": 0.26256499133448874, + "grad_norm": 2.705155877672245, + "learning_rate": 2.5943363311297316e-06, + "loss": 0.5181, + "step": 6363 + }, + { + "epoch": 0.26260625567384666, + "grad_norm": 5.40915925966643, + "learning_rate": 2.594199211922759e-06, + "loss": 0.5445, + "step": 6364 + }, + { + "epoch": 0.26264752001320457, + "grad_norm": 2.6905069326745426, + "learning_rate": 2.5940620731706507e-06, + "loss": 0.5378, + "step": 6365 + }, + { + "epoch": 0.26268878435256254, + "grad_norm": 4.999087499937811, + "learning_rate": 2.5939249148758573e-06, + "loss": 0.5522, + "step": 6366 + }, + { + "epoch": 0.26273004869192046, + "grad_norm": 35.5957501480035, + "learning_rate": 2.593787737040828e-06, + "loss": 0.6077, + "step": 6367 + }, + { + "epoch": 0.2627713130312784, + "grad_norm": 4.2129419946766395, + "learning_rate": 2.5936505396680143e-06, + "loss": 0.5388, + "step": 6368 + }, + { + "epoch": 0.2628125773706363, + "grad_norm": 2.4485790043317754, + "learning_rate": 2.5935133227598653e-06, + "loss": 0.5145, + "step": 6369 + }, + { + "epoch": 0.2628538417099942, + "grad_norm": 2.942183188813924, + "learning_rate": 2.593376086318833e-06, + "loss": 0.5515, + "step": 6370 + }, + { + "epoch": 0.2628951060493521, + "grad_norm": 5.926575927157448, + "learning_rate": 2.5932388303473683e-06, + "loss": 0.5388, + "step": 6371 + }, + { + "epoch": 0.2629363703887101, + "grad_norm": 3.7442462317042375, + "learning_rate": 2.593101554847924e-06, + "loss": 0.5526, + "step": 6372 + }, + { + "epoch": 0.262977634728068, + "grad_norm": 2.4341427404322302, + "learning_rate": 2.592964259822951e-06, + "loss": 0.5408, + "step": 6373 + }, + { + "epoch": 0.2630188990674259, + "grad_norm": 2.6303819771798183, + "learning_rate": 2.5928269452749018e-06, + "loss": 0.5064, + "step": 6374 + }, + { + "epoch": 0.26306016340678384, + "grad_norm": 3.3368898391575095, + "learning_rate": 2.5926896112062295e-06, + "loss": 0.5648, + "step": 6375 + }, + { + "epoch": 0.26310142774614176, + "grad_norm": 4.607836085717937, + "learning_rate": 2.592552257619388e-06, + "loss": 0.5466, + "step": 6376 + }, + { + "epoch": 0.26314269208549973, + "grad_norm": 5.454438973622416, + "learning_rate": 2.5924148845168296e-06, + "loss": 0.5262, + "step": 6377 + }, + { + "epoch": 0.26318395642485765, + "grad_norm": 2.053694348559572, + "learning_rate": 2.5922774919010088e-06, + "loss": 0.562, + "step": 6378 + }, + { + "epoch": 0.26322522076421556, + "grad_norm": 3.7669473497309593, + "learning_rate": 2.592140079774379e-06, + "loss": 0.5602, + "step": 6379 + }, + { + "epoch": 0.2632664851035735, + "grad_norm": 3.4444663213468303, + "learning_rate": 2.592002648139396e-06, + "loss": 0.5269, + "step": 6380 + }, + { + "epoch": 0.2633077494429314, + "grad_norm": 3.1058118449491983, + "learning_rate": 2.591865196998513e-06, + "loss": 0.5715, + "step": 6381 + }, + { + "epoch": 0.26334901378228937, + "grad_norm": 2.7073024831742414, + "learning_rate": 2.591727726354187e-06, + "loss": 0.5514, + "step": 6382 + }, + { + "epoch": 0.2633902781216473, + "grad_norm": 3.829765140223118, + "learning_rate": 2.591590236208872e-06, + "loss": 0.5114, + "step": 6383 + }, + { + "epoch": 0.2634315424610052, + "grad_norm": 2.093201512123983, + "learning_rate": 2.591452726565025e-06, + "loss": 0.5218, + "step": 6384 + }, + { + "epoch": 0.2634728068003631, + "grad_norm": 4.53342396490053, + "learning_rate": 
2.591315197425102e-06, + "loss": 0.5522, + "step": 6385 + }, + { + "epoch": 0.26351407113972103, + "grad_norm": 2.6093520833765096, + "learning_rate": 2.5911776487915596e-06, + "loss": 0.5744, + "step": 6386 + }, + { + "epoch": 0.263555335479079, + "grad_norm": 2.5759336612130097, + "learning_rate": 2.5910400806668546e-06, + "loss": 0.5416, + "step": 6387 + }, + { + "epoch": 0.2635965998184369, + "grad_norm": 3.7542829540062663, + "learning_rate": 2.590902493053444e-06, + "loss": 0.6123, + "step": 6388 + }, + { + "epoch": 0.26363786415779483, + "grad_norm": 4.50817395264325, + "learning_rate": 2.5907648859537864e-06, + "loss": 0.5595, + "step": 6389 + }, + { + "epoch": 0.26367912849715275, + "grad_norm": 3.4104947149461355, + "learning_rate": 2.590627259370339e-06, + "loss": 0.6094, + "step": 6390 + }, + { + "epoch": 0.26372039283651066, + "grad_norm": 2.524100317219306, + "learning_rate": 2.5904896133055607e-06, + "loss": 0.5462, + "step": 6391 + }, + { + "epoch": 0.26376165717586864, + "grad_norm": 2.21048729013647, + "learning_rate": 2.590351947761909e-06, + "loss": 0.5324, + "step": 6392 + }, + { + "epoch": 0.26380292151522655, + "grad_norm": 2.022094890837482, + "learning_rate": 2.590214262741845e-06, + "loss": 0.5228, + "step": 6393 + }, + { + "epoch": 0.26384418585458447, + "grad_norm": 3.0522509250193948, + "learning_rate": 2.5900765582478265e-06, + "loss": 0.5605, + "step": 6394 + }, + { + "epoch": 0.2638854501939424, + "grad_norm": 7.338022360359008, + "learning_rate": 2.589938834282314e-06, + "loss": 0.5674, + "step": 6395 + }, + { + "epoch": 0.2639267145333003, + "grad_norm": 15.888763885732633, + "learning_rate": 2.5898010908477666e-06, + "loss": 0.5685, + "step": 6396 + }, + { + "epoch": 0.26396797887265827, + "grad_norm": 2.107157265532751, + "learning_rate": 2.589663327946646e-06, + "loss": 0.5616, + "step": 6397 + }, + { + "epoch": 0.2640092432120162, + "grad_norm": 11.477227716497516, + "learning_rate": 2.5895255455814125e-06, + "loss": 0.5672, + "step": 6398 + }, + { + "epoch": 0.2640505075513741, + "grad_norm": 3.7442160744454225, + "learning_rate": 2.5893877437545266e-06, + "loss": 0.508, + "step": 6399 + }, + { + "epoch": 0.264091771890732, + "grad_norm": 2.190075600738966, + "learning_rate": 2.5892499224684513e-06, + "loss": 0.5418, + "step": 6400 + }, + { + "epoch": 0.26413303623008993, + "grad_norm": 3.621378278569591, + "learning_rate": 2.5891120817256468e-06, + "loss": 0.573, + "step": 6401 + }, + { + "epoch": 0.2641743005694479, + "grad_norm": 9.306287242727432, + "learning_rate": 2.588974221528576e-06, + "loss": 0.5911, + "step": 6402 + }, + { + "epoch": 0.2642155649088058, + "grad_norm": 3.8336875537954525, + "learning_rate": 2.588836341879702e-06, + "loss": 0.5403, + "step": 6403 + }, + { + "epoch": 0.26425682924816374, + "grad_norm": 2.2796770198905665, + "learning_rate": 2.588698442781486e-06, + "loss": 0.5648, + "step": 6404 + }, + { + "epoch": 0.26429809358752165, + "grad_norm": 2.629472419874519, + "learning_rate": 2.5885605242363934e-06, + "loss": 0.5112, + "step": 6405 + }, + { + "epoch": 0.26433935792687957, + "grad_norm": 4.248058213193474, + "learning_rate": 2.588422586246886e-06, + "loss": 0.5338, + "step": 6406 + }, + { + "epoch": 0.26438062226623754, + "grad_norm": 2.9433260702738497, + "learning_rate": 2.588284628815429e-06, + "loss": 0.5998, + "step": 6407 + }, + { + "epoch": 0.26442188660559546, + "grad_norm": 6.204653208650667, + "learning_rate": 2.5881466519444855e-06, + "loss": 0.5674, + "step": 6408 + }, + { + "epoch": 0.2644631509449534, 
+ "grad_norm": 5.964495092982201, + "learning_rate": 2.588008655636521e-06, + "loss": 0.5654, + "step": 6409 + }, + { + "epoch": 0.2645044152843113, + "grad_norm": 1.9930091179973788, + "learning_rate": 2.587870639894e-06, + "loss": 0.5086, + "step": 6410 + }, + { + "epoch": 0.2645456796236692, + "grad_norm": 1.7538173835882092, + "learning_rate": 2.587732604719388e-06, + "loss": 0.5531, + "step": 6411 + }, + { + "epoch": 0.2645869439630272, + "grad_norm": 11.414142419064664, + "learning_rate": 2.5875945501151503e-06, + "loss": 0.5289, + "step": 6412 + }, + { + "epoch": 0.2646282083023851, + "grad_norm": 3.1566886819343987, + "learning_rate": 2.5874564760837532e-06, + "loss": 0.5544, + "step": 6413 + }, + { + "epoch": 0.264669472641743, + "grad_norm": 3.285493001631718, + "learning_rate": 2.5873183826276634e-06, + "loss": 0.5908, + "step": 6414 + }, + { + "epoch": 0.2647107369811009, + "grad_norm": 4.809216291606286, + "learning_rate": 2.587180269749347e-06, + "loss": 0.5787, + "step": 6415 + }, + { + "epoch": 0.26475200132045884, + "grad_norm": 3.0008260759900214, + "learning_rate": 2.587042137451271e-06, + "loss": 0.5385, + "step": 6416 + }, + { + "epoch": 0.2647932656598168, + "grad_norm": 2.092641754590074, + "learning_rate": 2.5869039857359036e-06, + "loss": 0.5389, + "step": 6417 + }, + { + "epoch": 0.26483452999917473, + "grad_norm": 3.3211981515205347, + "learning_rate": 2.586765814605712e-06, + "loss": 0.5093, + "step": 6418 + }, + { + "epoch": 0.26487579433853264, + "grad_norm": 3.2117420084832102, + "learning_rate": 2.586627624063164e-06, + "loss": 0.617, + "step": 6419 + }, + { + "epoch": 0.26491705867789056, + "grad_norm": 3.0059534012667775, + "learning_rate": 2.5864894141107284e-06, + "loss": 0.5651, + "step": 6420 + }, + { + "epoch": 0.2649583230172485, + "grad_norm": 5.682135323088766, + "learning_rate": 2.5863511847508742e-06, + "loss": 0.6043, + "step": 6421 + }, + { + "epoch": 0.26499958735660645, + "grad_norm": 2.849342911457631, + "learning_rate": 2.5862129359860695e-06, + "loss": 0.5071, + "step": 6422 + }, + { + "epoch": 0.26504085169596436, + "grad_norm": 2.4823187332732175, + "learning_rate": 2.586074667818785e-06, + "loss": 0.5654, + "step": 6423 + }, + { + "epoch": 0.2650821160353223, + "grad_norm": 3.5226372397350505, + "learning_rate": 2.5859363802514895e-06, + "loss": 0.553, + "step": 6424 + }, + { + "epoch": 0.2651233803746802, + "grad_norm": 3.4896553756140465, + "learning_rate": 2.585798073286654e-06, + "loss": 0.5179, + "step": 6425 + }, + { + "epoch": 0.2651646447140381, + "grad_norm": 3.0086707633511276, + "learning_rate": 2.5856597469267493e-06, + "loss": 0.5756, + "step": 6426 + }, + { + "epoch": 0.2652059090533961, + "grad_norm": 2.6233079685149945, + "learning_rate": 2.5855214011742445e-06, + "loss": 0.5574, + "step": 6427 + }, + { + "epoch": 0.265247173392754, + "grad_norm": 4.758395975010338, + "learning_rate": 2.5853830360316123e-06, + "loss": 0.5483, + "step": 6428 + }, + { + "epoch": 0.2652884377321119, + "grad_norm": 28.034672329955264, + "learning_rate": 2.585244651501324e-06, + "loss": 0.4881, + "step": 6429 + }, + { + "epoch": 0.26532970207146983, + "grad_norm": 2.8644562439645465, + "learning_rate": 2.5851062475858515e-06, + "loss": 0.5429, + "step": 6430 + }, + { + "epoch": 0.26537096641082775, + "grad_norm": 2.012847855300314, + "learning_rate": 2.5849678242876668e-06, + "loss": 0.5535, + "step": 6431 + }, + { + "epoch": 0.26541223075018566, + "grad_norm": 1.8723391415857564, + "learning_rate": 2.5848293816092424e-06, + "loss": 0.5039, + 
"step": 6432 + }, + { + "epoch": 0.26545349508954363, + "grad_norm": 3.036108927368421, + "learning_rate": 2.5846909195530513e-06, + "loss": 0.5703, + "step": 6433 + }, + { + "epoch": 0.26549475942890155, + "grad_norm": 3.080672017595251, + "learning_rate": 2.5845524381215677e-06, + "loss": 0.6027, + "step": 6434 + }, + { + "epoch": 0.26553602376825947, + "grad_norm": 4.658601893688101, + "learning_rate": 2.5844139373172636e-06, + "loss": 0.5094, + "step": 6435 + }, + { + "epoch": 0.2655772881076174, + "grad_norm": 1.98803266979686, + "learning_rate": 2.5842754171426137e-06, + "loss": 0.4798, + "step": 6436 + }, + { + "epoch": 0.2656185524469753, + "grad_norm": 8.648741159932053, + "learning_rate": 2.584136877600093e-06, + "loss": 0.532, + "step": 6437 + }, + { + "epoch": 0.26565981678633327, + "grad_norm": 2.053736032783258, + "learning_rate": 2.5839983186921748e-06, + "loss": 0.5646, + "step": 6438 + }, + { + "epoch": 0.2657010811256912, + "grad_norm": 4.990540319496759, + "learning_rate": 2.583859740421336e-06, + "loss": 0.5189, + "step": 6439 + }, + { + "epoch": 0.2657423454650491, + "grad_norm": 6.575916452958206, + "learning_rate": 2.58372114279005e-06, + "loss": 0.5856, + "step": 6440 + }, + { + "epoch": 0.265783609804407, + "grad_norm": 7.944484682004012, + "learning_rate": 2.5835825258007936e-06, + "loss": 0.5986, + "step": 6441 + }, + { + "epoch": 0.26582487414376493, + "grad_norm": 2.663906476788227, + "learning_rate": 2.583443889456042e-06, + "loss": 0.5735, + "step": 6442 + }, + { + "epoch": 0.2658661384831229, + "grad_norm": 3.432016401809526, + "learning_rate": 2.583305233758273e-06, + "loss": 0.5585, + "step": 6443 + }, + { + "epoch": 0.2659074028224808, + "grad_norm": 5.5912975527647895, + "learning_rate": 2.5831665587099624e-06, + "loss": 0.4808, + "step": 6444 + }, + { + "epoch": 0.26594866716183874, + "grad_norm": 2.753966186317837, + "learning_rate": 2.583027864313587e-06, + "loss": 0.5917, + "step": 6445 + }, + { + "epoch": 0.26598993150119665, + "grad_norm": 2.8313396572169345, + "learning_rate": 2.582889150571625e-06, + "loss": 0.5465, + "step": 6446 + }, + { + "epoch": 0.26603119584055457, + "grad_norm": 2.7524042422054618, + "learning_rate": 2.582750417486553e-06, + "loss": 0.5359, + "step": 6447 + }, + { + "epoch": 0.26607246017991254, + "grad_norm": 2.1481685221219577, + "learning_rate": 2.582611665060851e-06, + "loss": 0.5379, + "step": 6448 + }, + { + "epoch": 0.26611372451927046, + "grad_norm": 4.211007734454713, + "learning_rate": 2.5824728932969957e-06, + "loss": 0.5517, + "step": 6449 + }, + { + "epoch": 0.26615498885862837, + "grad_norm": 1.912596756321829, + "learning_rate": 2.582334102197467e-06, + "loss": 0.5187, + "step": 6450 + }, + { + "epoch": 0.2661962531979863, + "grad_norm": 3.3298135528079813, + "learning_rate": 2.5821952917647433e-06, + "loss": 0.535, + "step": 6451 + }, + { + "epoch": 0.2662375175373442, + "grad_norm": 3.2134529468715898, + "learning_rate": 2.582056462001305e-06, + "loss": 0.5283, + "step": 6452 + }, + { + "epoch": 0.2662787818767022, + "grad_norm": 2.5778198643133052, + "learning_rate": 2.581917612909631e-06, + "loss": 0.5463, + "step": 6453 + }, + { + "epoch": 0.2663200462160601, + "grad_norm": 2.484792023297571, + "learning_rate": 2.5817787444922015e-06, + "loss": 0.5562, + "step": 6454 + }, + { + "epoch": 0.266361310555418, + "grad_norm": 2.1074551097018084, + "learning_rate": 2.581639856751498e-06, + "loss": 0.5222, + "step": 6455 + }, + { + "epoch": 0.2664025748947759, + "grad_norm": 2.2941617660433127, + 
"learning_rate": 2.581500949690001e-06, + "loss": 0.4991, + "step": 6456 + }, + { + "epoch": 0.26644383923413384, + "grad_norm": 2.947311388612125, + "learning_rate": 2.5813620233101906e-06, + "loss": 0.5083, + "step": 6457 + }, + { + "epoch": 0.2664851035734918, + "grad_norm": 1.9292766813813331, + "learning_rate": 2.58122307761455e-06, + "loss": 0.5638, + "step": 6458 + }, + { + "epoch": 0.2665263679128497, + "grad_norm": 5.135478547717948, + "learning_rate": 2.5810841126055605e-06, + "loss": 0.5509, + "step": 6459 + }, + { + "epoch": 0.26656763225220764, + "grad_norm": 2.045197455480979, + "learning_rate": 2.580945128285704e-06, + "loss": 0.5314, + "step": 6460 + }, + { + "epoch": 0.26660889659156556, + "grad_norm": 2.9651545664117847, + "learning_rate": 2.5808061246574633e-06, + "loss": 0.4905, + "step": 6461 + }, + { + "epoch": 0.2666501609309235, + "grad_norm": 2.280188058840585, + "learning_rate": 2.580667101723322e-06, + "loss": 0.5598, + "step": 6462 + }, + { + "epoch": 0.26669142527028145, + "grad_norm": 2.8088928814423064, + "learning_rate": 2.5805280594857627e-06, + "loss": 0.5751, + "step": 6463 + }, + { + "epoch": 0.26673268960963936, + "grad_norm": 2.320584849612772, + "learning_rate": 2.580388997947269e-06, + "loss": 0.5419, + "step": 6464 + }, + { + "epoch": 0.2667739539489973, + "grad_norm": 7.149448449748245, + "learning_rate": 2.5802499171103248e-06, + "loss": 0.589, + "step": 6465 + }, + { + "epoch": 0.2668152182883552, + "grad_norm": 3.1143557447248056, + "learning_rate": 2.580110816977415e-06, + "loss": 0.5217, + "step": 6466 + }, + { + "epoch": 0.2668564826277131, + "grad_norm": 2.8831145092191144, + "learning_rate": 2.5799716975510237e-06, + "loss": 0.5283, + "step": 6467 + }, + { + "epoch": 0.2668977469670711, + "grad_norm": 2.943670156863075, + "learning_rate": 2.5798325588336367e-06, + "loss": 0.5576, + "step": 6468 + }, + { + "epoch": 0.266939011306429, + "grad_norm": 2.832224860982259, + "learning_rate": 2.5796934008277386e-06, + "loss": 0.5448, + "step": 6469 + }, + { + "epoch": 0.2669802756457869, + "grad_norm": 2.2844873633985836, + "learning_rate": 2.5795542235358156e-06, + "loss": 0.5692, + "step": 6470 + }, + { + "epoch": 0.26702153998514483, + "grad_norm": 2.3488914650177035, + "learning_rate": 2.579415026960353e-06, + "loss": 0.5727, + "step": 6471 + }, + { + "epoch": 0.26706280432450274, + "grad_norm": 2.653143491943819, + "learning_rate": 2.5792758111038382e-06, + "loss": 0.5309, + "step": 6472 + }, + { + "epoch": 0.2671040686638607, + "grad_norm": 3.1584443256525225, + "learning_rate": 2.5791365759687573e-06, + "loss": 0.5074, + "step": 6473 + }, + { + "epoch": 0.26714533300321863, + "grad_norm": 2.201857775710368, + "learning_rate": 2.578997321557598e-06, + "loss": 0.5622, + "step": 6474 + }, + { + "epoch": 0.26718659734257655, + "grad_norm": 3.012386426949232, + "learning_rate": 2.578858047872847e-06, + "loss": 0.5533, + "step": 6475 + }, + { + "epoch": 0.26722786168193446, + "grad_norm": 3.8879439184953646, + "learning_rate": 2.5787187549169924e-06, + "loss": 0.5599, + "step": 6476 + }, + { + "epoch": 0.2672691260212924, + "grad_norm": 5.429630021739109, + "learning_rate": 2.5785794426925222e-06, + "loss": 0.5864, + "step": 6477 + }, + { + "epoch": 0.26731039036065035, + "grad_norm": 9.564494377176679, + "learning_rate": 2.5784401112019247e-06, + "loss": 0.6281, + "step": 6478 + }, + { + "epoch": 0.26735165470000827, + "grad_norm": 6.798037738676304, + "learning_rate": 2.5783007604476893e-06, + "loss": 0.5108, + "step": 6479 + }, + { + "epoch": 
0.2673929190393662, + "grad_norm": 2.6815875446080883, + "learning_rate": 2.578161390432305e-06, + "loss": 0.5843, + "step": 6480 + }, + { + "epoch": 0.2674341833787241, + "grad_norm": 1.9563000794729566, + "learning_rate": 2.5780220011582603e-06, + "loss": 0.518, + "step": 6481 + }, + { + "epoch": 0.267475447718082, + "grad_norm": 4.266069169816757, + "learning_rate": 2.577882592628047e-06, + "loss": 0.5381, + "step": 6482 + }, + { + "epoch": 0.26751671205744, + "grad_norm": 3.5698362594429507, + "learning_rate": 2.5777431648441537e-06, + "loss": 0.5098, + "step": 6483 + }, + { + "epoch": 0.2675579763967979, + "grad_norm": 4.154221156351994, + "learning_rate": 2.5776037178090714e-06, + "loss": 0.5545, + "step": 6484 + }, + { + "epoch": 0.2675992407361558, + "grad_norm": 4.505002209567328, + "learning_rate": 2.5774642515252904e-06, + "loss": 0.5436, + "step": 6485 + }, + { + "epoch": 0.26764050507551373, + "grad_norm": 4.984205356373493, + "learning_rate": 2.577324765995303e-06, + "loss": 0.5622, + "step": 6486 + }, + { + "epoch": 0.26768176941487165, + "grad_norm": 2.826804636254674, + "learning_rate": 2.5771852612215998e-06, + "loss": 0.56, + "step": 6487 + }, + { + "epoch": 0.2677230337542296, + "grad_norm": 2.0521268983067205, + "learning_rate": 2.5770457372066738e-06, + "loss": 0.6099, + "step": 6488 + }, + { + "epoch": 0.26776429809358754, + "grad_norm": 2.71958135539496, + "learning_rate": 2.576906193953016e-06, + "loss": 0.639, + "step": 6489 + }, + { + "epoch": 0.26780556243294545, + "grad_norm": 4.746643798890504, + "learning_rate": 2.5767666314631197e-06, + "loss": 0.5479, + "step": 6490 + }, + { + "epoch": 0.26784682677230337, + "grad_norm": 5.296114833898723, + "learning_rate": 2.5766270497394778e-06, + "loss": 0.5233, + "step": 6491 + }, + { + "epoch": 0.2678880911116613, + "grad_norm": 1.9322451064413868, + "learning_rate": 2.5764874487845838e-06, + "loss": 0.5327, + "step": 6492 + }, + { + "epoch": 0.2679293554510192, + "grad_norm": 8.336465399597175, + "learning_rate": 2.5763478286009306e-06, + "loss": 0.5472, + "step": 6493 + }, + { + "epoch": 0.2679706197903772, + "grad_norm": 5.156688916670559, + "learning_rate": 2.5762081891910122e-06, + "loss": 0.5543, + "step": 6494 + }, + { + "epoch": 0.2680118841297351, + "grad_norm": 4.318274124067826, + "learning_rate": 2.576068530557324e-06, + "loss": 0.5375, + "step": 6495 + }, + { + "epoch": 0.268053148469093, + "grad_norm": 2.593493471270296, + "learning_rate": 2.57592885270236e-06, + "loss": 0.6094, + "step": 6496 + }, + { + "epoch": 0.2680944128084509, + "grad_norm": 2.8174392667837482, + "learning_rate": 2.5757891556286146e-06, + "loss": 0.5545, + "step": 6497 + }, + { + "epoch": 0.26813567714780884, + "grad_norm": 3.2391962303900717, + "learning_rate": 2.575649439338584e-06, + "loss": 0.5146, + "step": 6498 + }, + { + "epoch": 0.2681769414871668, + "grad_norm": 3.0999395976085453, + "learning_rate": 2.575509703834763e-06, + "loss": 0.544, + "step": 6499 + }, + { + "epoch": 0.2682182058265247, + "grad_norm": 5.96155310548784, + "learning_rate": 2.5753699491196486e-06, + "loss": 0.5199, + "step": 6500 + }, + { + "epoch": 0.26825947016588264, + "grad_norm": 6.30558596610009, + "learning_rate": 2.575230175195737e-06, + "loss": 0.5128, + "step": 6501 + }, + { + "epoch": 0.26830073450524056, + "grad_norm": 4.02881689290081, + "learning_rate": 2.5750903820655245e-06, + "loss": 0.4968, + "step": 6502 + }, + { + "epoch": 0.2683419988445985, + "grad_norm": 2.8511906969636467, + "learning_rate": 2.574950569731508e-06, + "loss": 
0.4888, + "step": 6503 + }, + { + "epoch": 0.26838326318395644, + "grad_norm": 3.2330554302532484, + "learning_rate": 2.5748107381961856e-06, + "loss": 0.5066, + "step": 6504 + }, + { + "epoch": 0.26842452752331436, + "grad_norm": 8.93635489611825, + "learning_rate": 2.5746708874620544e-06, + "loss": 0.5299, + "step": 6505 + }, + { + "epoch": 0.2684657918626723, + "grad_norm": 2.956324726684149, + "learning_rate": 2.574531017531613e-06, + "loss": 0.5165, + "step": 6506 + }, + { + "epoch": 0.2685070562020302, + "grad_norm": 2.7671881069981317, + "learning_rate": 2.574391128407359e-06, + "loss": 0.525, + "step": 6507 + }, + { + "epoch": 0.2685483205413881, + "grad_norm": 2.2682423252090667, + "learning_rate": 2.5742512200917923e-06, + "loss": 0.5056, + "step": 6508 + }, + { + "epoch": 0.2685895848807461, + "grad_norm": 5.541494957200762, + "learning_rate": 2.5741112925874114e-06, + "loss": 0.6354, + "step": 6509 + }, + { + "epoch": 0.268630849220104, + "grad_norm": 16.86004762360349, + "learning_rate": 2.5739713458967156e-06, + "loss": 0.571, + "step": 6510 + }, + { + "epoch": 0.2686721135594619, + "grad_norm": 4.768667370818037, + "learning_rate": 2.5738313800222047e-06, + "loss": 0.5319, + "step": 6511 + }, + { + "epoch": 0.2687133778988198, + "grad_norm": 3.882335551726952, + "learning_rate": 2.5736913949663794e-06, + "loss": 0.5764, + "step": 6512 + }, + { + "epoch": 0.26875464223817774, + "grad_norm": 10.076869731051557, + "learning_rate": 2.5735513907317398e-06, + "loss": 0.5237, + "step": 6513 + }, + { + "epoch": 0.2687959065775357, + "grad_norm": 3.016034496351198, + "learning_rate": 2.5734113673207862e-06, + "loss": 0.5287, + "step": 6514 + }, + { + "epoch": 0.26883717091689363, + "grad_norm": 4.097348832753523, + "learning_rate": 2.573271324736021e-06, + "loss": 0.5456, + "step": 6515 + }, + { + "epoch": 0.26887843525625155, + "grad_norm": 2.7676577052509272, + "learning_rate": 2.573131262979945e-06, + "loss": 0.5714, + "step": 6516 + }, + { + "epoch": 0.26891969959560946, + "grad_norm": 3.0565540548095393, + "learning_rate": 2.5729911820550594e-06, + "loss": 0.513, + "step": 6517 + }, + { + "epoch": 0.2689609639349674, + "grad_norm": 2.720076116614369, + "learning_rate": 2.572851081963868e-06, + "loss": 0.533, + "step": 6518 + }, + { + "epoch": 0.26900222827432535, + "grad_norm": 3.6456403645559803, + "learning_rate": 2.572710962708872e-06, + "loss": 0.6266, + "step": 6519 + }, + { + "epoch": 0.26904349261368327, + "grad_norm": 2.2672985685626026, + "learning_rate": 2.5725708242925747e-06, + "loss": 0.5278, + "step": 6520 + }, + { + "epoch": 0.2690847569530412, + "grad_norm": 4.8494266932930294, + "learning_rate": 2.572430666717479e-06, + "loss": 0.5159, + "step": 6521 + }, + { + "epoch": 0.2691260212923991, + "grad_norm": 2.4668247353477937, + "learning_rate": 2.5722904899860887e-06, + "loss": 0.567, + "step": 6522 + }, + { + "epoch": 0.269167285631757, + "grad_norm": 3.151687464619152, + "learning_rate": 2.5721502941009083e-06, + "loss": 0.5846, + "step": 6523 + }, + { + "epoch": 0.269208549971115, + "grad_norm": 2.9952896311959116, + "learning_rate": 2.5720100790644418e-06, + "loss": 0.5186, + "step": 6524 + }, + { + "epoch": 0.2692498143104729, + "grad_norm": 4.392568171714036, + "learning_rate": 2.571869844879193e-06, + "loss": 0.4881, + "step": 6525 + }, + { + "epoch": 0.2692910786498308, + "grad_norm": 2.8282254576244865, + "learning_rate": 2.571729591547667e-06, + "loss": 0.5964, + "step": 6526 + }, + { + "epoch": 0.26933234298918873, + "grad_norm": 3.032404561044813, + 
"learning_rate": 2.5715893190723705e-06, + "loss": 0.5425, + "step": 6527 + }, + { + "epoch": 0.26937360732854665, + "grad_norm": 4.294369202952511, + "learning_rate": 2.571449027455807e-06, + "loss": 0.5861, + "step": 6528 + }, + { + "epoch": 0.2694148716679046, + "grad_norm": 3.5002935766855257, + "learning_rate": 2.571308716700484e-06, + "loss": 0.5378, + "step": 6529 + }, + { + "epoch": 0.26945613600726254, + "grad_norm": 2.615676893169648, + "learning_rate": 2.5711683868089075e-06, + "loss": 0.5103, + "step": 6530 + }, + { + "epoch": 0.26949740034662045, + "grad_norm": 5.116815861976112, + "learning_rate": 2.5710280377835834e-06, + "loss": 0.5784, + "step": 6531 + }, + { + "epoch": 0.26953866468597837, + "grad_norm": 14.137299979337838, + "learning_rate": 2.5708876696270202e-06, + "loss": 0.548, + "step": 6532 + }, + { + "epoch": 0.2695799290253363, + "grad_norm": 2.5972003744162055, + "learning_rate": 2.570747282341723e-06, + "loss": 0.6033, + "step": 6533 + }, + { + "epoch": 0.26962119336469426, + "grad_norm": 3.9350398085777174, + "learning_rate": 2.570606875930202e-06, + "loss": 0.5665, + "step": 6534 + }, + { + "epoch": 0.26966245770405217, + "grad_norm": 2.9257247414318437, + "learning_rate": 2.5704664503949633e-06, + "loss": 0.5678, + "step": 6535 + }, + { + "epoch": 0.2697037220434101, + "grad_norm": 2.7983166554980015, + "learning_rate": 2.5703260057385157e-06, + "loss": 0.5753, + "step": 6536 + }, + { + "epoch": 0.269744986382768, + "grad_norm": 7.238557906193331, + "learning_rate": 2.570185541963368e-06, + "loss": 0.6266, + "step": 6537 + }, + { + "epoch": 0.2697862507221259, + "grad_norm": 11.891041835852455, + "learning_rate": 2.57004505907203e-06, + "loss": 0.5676, + "step": 6538 + }, + { + "epoch": 0.2698275150614839, + "grad_norm": 9.040199548848783, + "learning_rate": 2.5699045570670097e-06, + "loss": 0.5731, + "step": 6539 + }, + { + "epoch": 0.2698687794008418, + "grad_norm": 2.9501394290178427, + "learning_rate": 2.5697640359508182e-06, + "loss": 0.5295, + "step": 6540 + }, + { + "epoch": 0.2699100437401997, + "grad_norm": 1.9849059459135987, + "learning_rate": 2.5696234957259645e-06, + "loss": 0.4972, + "step": 6541 + }, + { + "epoch": 0.26995130807955764, + "grad_norm": 2.444516323184735, + "learning_rate": 2.5694829363949597e-06, + "loss": 0.538, + "step": 6542 + }, + { + "epoch": 0.26999257241891556, + "grad_norm": 2.0174922506565034, + "learning_rate": 2.5693423579603138e-06, + "loss": 0.5397, + "step": 6543 + }, + { + "epoch": 0.2700338367582735, + "grad_norm": 1.4305818863636237, + "learning_rate": 2.5692017604245388e-06, + "loss": 0.5146, + "step": 6544 + }, + { + "epoch": 0.27007510109763144, + "grad_norm": 5.480395120128496, + "learning_rate": 2.5690611437901452e-06, + "loss": 0.6026, + "step": 6545 + }, + { + "epoch": 0.27011636543698936, + "grad_norm": 6.279245657817174, + "learning_rate": 2.568920508059645e-06, + "loss": 0.5365, + "step": 6546 + }, + { + "epoch": 0.2701576297763473, + "grad_norm": 2.291214692094578, + "learning_rate": 2.568779853235551e-06, + "loss": 0.5286, + "step": 6547 + }, + { + "epoch": 0.2701988941157052, + "grad_norm": 2.513779815065611, + "learning_rate": 2.5686391793203747e-06, + "loss": 0.5467, + "step": 6548 + }, + { + "epoch": 0.27024015845506316, + "grad_norm": 2.8615898479451247, + "learning_rate": 2.5684984863166297e-06, + "loss": 0.5696, + "step": 6549 + }, + { + "epoch": 0.2702814227944211, + "grad_norm": 3.2713871002479884, + "learning_rate": 2.5683577742268284e-06, + "loss": 0.4938, + "step": 6550 + }, + { + 
"epoch": 0.270322687133779, + "grad_norm": 2.4229834826981858, + "learning_rate": 2.568217043053485e-06, + "loss": 0.5543, + "step": 6551 + }, + { + "epoch": 0.2703639514731369, + "grad_norm": 2.349021065249948, + "learning_rate": 2.568076292799113e-06, + "loss": 0.5447, + "step": 6552 + }, + { + "epoch": 0.2704052158124948, + "grad_norm": 45.64596029401004, + "learning_rate": 2.5679355234662262e-06, + "loss": 0.5599, + "step": 6553 + }, + { + "epoch": 0.27044648015185274, + "grad_norm": 9.193456447381362, + "learning_rate": 2.5677947350573393e-06, + "loss": 0.5317, + "step": 6554 + }, + { + "epoch": 0.2704877444912107, + "grad_norm": 2.621613109826309, + "learning_rate": 2.5676539275749675e-06, + "loss": 0.5549, + "step": 6555 + }, + { + "epoch": 0.27052900883056863, + "grad_norm": 3.1905213085265793, + "learning_rate": 2.5675131010216256e-06, + "loss": 0.5016, + "step": 6556 + }, + { + "epoch": 0.27057027316992655, + "grad_norm": 5.924320988333946, + "learning_rate": 2.5673722553998292e-06, + "loss": 0.5233, + "step": 6557 + }, + { + "epoch": 0.27061153750928446, + "grad_norm": 2.7565890705934435, + "learning_rate": 2.567231390712094e-06, + "loss": 0.5627, + "step": 6558 + }, + { + "epoch": 0.2706528018486424, + "grad_norm": 2.6122914748354016, + "learning_rate": 2.5670905069609365e-06, + "loss": 0.5319, + "step": 6559 + }, + { + "epoch": 0.27069406618800035, + "grad_norm": 2.4255198221335292, + "learning_rate": 2.566949604148873e-06, + "loss": 0.5435, + "step": 6560 + }, + { + "epoch": 0.27073533052735826, + "grad_norm": 2.1797211088737725, + "learning_rate": 2.5668086822784207e-06, + "loss": 0.5789, + "step": 6561 + }, + { + "epoch": 0.2707765948667162, + "grad_norm": 2.7017032185873906, + "learning_rate": 2.5666677413520965e-06, + "loss": 0.6221, + "step": 6562 + }, + { + "epoch": 0.2708178592060741, + "grad_norm": 6.7100663639072335, + "learning_rate": 2.5665267813724183e-06, + "loss": 0.5639, + "step": 6563 + }, + { + "epoch": 0.270859123545432, + "grad_norm": 2.727303098976555, + "learning_rate": 2.5663858023419032e-06, + "loss": 0.487, + "step": 6564 + }, + { + "epoch": 0.27090038788479, + "grad_norm": 2.7003730285978422, + "learning_rate": 2.5662448042630702e-06, + "loss": 0.5416, + "step": 6565 + }, + { + "epoch": 0.2709416522241479, + "grad_norm": 2.9545952943814457, + "learning_rate": 2.5661037871384375e-06, + "loss": 0.5348, + "step": 6566 + }, + { + "epoch": 0.2709829165635058, + "grad_norm": 10.840035788780849, + "learning_rate": 2.5659627509705243e-06, + "loss": 0.5537, + "step": 6567 + }, + { + "epoch": 0.27102418090286373, + "grad_norm": 2.7593233873584397, + "learning_rate": 2.56582169576185e-06, + "loss": 0.5159, + "step": 6568 + }, + { + "epoch": 0.27106544524222165, + "grad_norm": 2.919025272518411, + "learning_rate": 2.5656806215149335e-06, + "loss": 0.5018, + "step": 6569 + }, + { + "epoch": 0.2711067095815796, + "grad_norm": 4.728264770358736, + "learning_rate": 2.5655395282322955e-06, + "loss": 0.5686, + "step": 6570 + }, + { + "epoch": 0.27114797392093754, + "grad_norm": 5.76051322937361, + "learning_rate": 2.565398415916456e-06, + "loss": 0.5236, + "step": 6571 + }, + { + "epoch": 0.27118923826029545, + "grad_norm": 2.613418529224391, + "learning_rate": 2.5652572845699357e-06, + "loss": 0.5318, + "step": 6572 + }, + { + "epoch": 0.27123050259965337, + "grad_norm": 5.944625592874504, + "learning_rate": 2.565116134195255e-06, + "loss": 0.5371, + "step": 6573 + }, + { + "epoch": 0.2712717669390113, + "grad_norm": 7.423682473837498, + "learning_rate": 
2.5649749647949356e-06, + "loss": 0.5677, + "step": 6574 + }, + { + "epoch": 0.27131303127836925, + "grad_norm": 6.668748114384977, + "learning_rate": 2.5648337763715e-06, + "loss": 0.5738, + "step": 6575 + }, + { + "epoch": 0.27135429561772717, + "grad_norm": 2.8725068937187017, + "learning_rate": 2.564692568927468e-06, + "loss": 0.5848, + "step": 6576 + }, + { + "epoch": 0.2713955599570851, + "grad_norm": 1.9041878554667642, + "learning_rate": 2.5645513424653644e-06, + "loss": 0.5066, + "step": 6577 + }, + { + "epoch": 0.271436824296443, + "grad_norm": 5.194334731525901, + "learning_rate": 2.56441009698771e-06, + "loss": 0.5998, + "step": 6578 + }, + { + "epoch": 0.2714780886358009, + "grad_norm": 3.484581088955999, + "learning_rate": 2.564268832497029e-06, + "loss": 0.5787, + "step": 6579 + }, + { + "epoch": 0.2715193529751589, + "grad_norm": 2.6691501128407027, + "learning_rate": 2.564127548995844e-06, + "loss": 0.5222, + "step": 6580 + }, + { + "epoch": 0.2715606173145168, + "grad_norm": 2.0649526865116012, + "learning_rate": 2.563986246486679e-06, + "loss": 0.5694, + "step": 6581 + }, + { + "epoch": 0.2716018816538747, + "grad_norm": 5.2383616626284075, + "learning_rate": 2.563844924972058e-06, + "loss": 0.5864, + "step": 6582 + }, + { + "epoch": 0.27164314599323264, + "grad_norm": 2.659901248807135, + "learning_rate": 2.563703584454505e-06, + "loss": 0.5204, + "step": 6583 + }, + { + "epoch": 0.27168441033259055, + "grad_norm": 4.390921812111533, + "learning_rate": 2.5635622249365458e-06, + "loss": 0.5261, + "step": 6584 + }, + { + "epoch": 0.2717256746719485, + "grad_norm": 2.29320905759491, + "learning_rate": 2.563420846420704e-06, + "loss": 0.5161, + "step": 6585 + }, + { + "epoch": 0.27176693901130644, + "grad_norm": 2.028694412648014, + "learning_rate": 2.5632794489095055e-06, + "loss": 0.5536, + "step": 6586 + }, + { + "epoch": 0.27180820335066436, + "grad_norm": 9.600775227937168, + "learning_rate": 2.5631380324054763e-06, + "loss": 0.5531, + "step": 6587 + }, + { + "epoch": 0.2718494676900223, + "grad_norm": 5.725933373818947, + "learning_rate": 2.562996596911143e-06, + "loss": 0.6236, + "step": 6588 + }, + { + "epoch": 0.2718907320293802, + "grad_norm": 4.704795620556558, + "learning_rate": 2.56285514242903e-06, + "loss": 0.5637, + "step": 6589 + }, + { + "epoch": 0.27193199636873816, + "grad_norm": 2.39004214229941, + "learning_rate": 2.5627136689616662e-06, + "loss": 0.5329, + "step": 6590 + }, + { + "epoch": 0.2719732607080961, + "grad_norm": 5.593396816487333, + "learning_rate": 2.5625721765115775e-06, + "loss": 0.5047, + "step": 6591 + }, + { + "epoch": 0.272014525047454, + "grad_norm": 1.8776356498137008, + "learning_rate": 2.5624306650812914e-06, + "loss": 0.5365, + "step": 6592 + }, + { + "epoch": 0.2720557893868119, + "grad_norm": 4.379989318340361, + "learning_rate": 2.5622891346733363e-06, + "loss": 0.5816, + "step": 6593 + }, + { + "epoch": 0.2720970537261698, + "grad_norm": 4.452550362066403, + "learning_rate": 2.5621475852902394e-06, + "loss": 0.522, + "step": 6594 + }, + { + "epoch": 0.2721383180655278, + "grad_norm": 4.360129697829617, + "learning_rate": 2.5620060169345298e-06, + "loss": 0.5409, + "step": 6595 + }, + { + "epoch": 0.2721795824048857, + "grad_norm": 4.2655253972167495, + "learning_rate": 2.5618644296087355e-06, + "loss": 0.5585, + "step": 6596 + }, + { + "epoch": 0.2722208467442436, + "grad_norm": 2.9053722270472506, + "learning_rate": 2.5617228233153866e-06, + "loss": 0.5845, + "step": 6597 + }, + { + "epoch": 0.27226211108360154, + 
"grad_norm": 3.109040286776571, + "learning_rate": 2.561581198057012e-06, + "loss": 0.5755, + "step": 6598 + }, + { + "epoch": 0.27230337542295946, + "grad_norm": 6.065645000895701, + "learning_rate": 2.5614395538361413e-06, + "loss": 0.5593, + "step": 6599 + }, + { + "epoch": 0.27234463976231743, + "grad_norm": 2.611565179399803, + "learning_rate": 2.561297890655305e-06, + "loss": 0.5468, + "step": 6600 + }, + { + "epoch": 0.27238590410167535, + "grad_norm": 2.3009123379392205, + "learning_rate": 2.5611562085170333e-06, + "loss": 0.5624, + "step": 6601 + }, + { + "epoch": 0.27242716844103326, + "grad_norm": 2.5574927531632645, + "learning_rate": 2.5610145074238573e-06, + "loss": 0.5681, + "step": 6602 + }, + { + "epoch": 0.2724684327803912, + "grad_norm": 2.372286477816127, + "learning_rate": 2.5608727873783078e-06, + "loss": 0.56, + "step": 6603 + }, + { + "epoch": 0.2725096971197491, + "grad_norm": 2.5010424292545066, + "learning_rate": 2.5607310483829164e-06, + "loss": 0.6018, + "step": 6604 + }, + { + "epoch": 0.27255096145910707, + "grad_norm": 3.246900158570594, + "learning_rate": 2.5605892904402153e-06, + "loss": 0.5885, + "step": 6605 + }, + { + "epoch": 0.272592225798465, + "grad_norm": 14.901944195125653, + "learning_rate": 2.560447513552736e-06, + "loss": 0.5016, + "step": 6606 + }, + { + "epoch": 0.2726334901378229, + "grad_norm": 4.109190630716464, + "learning_rate": 2.560305717723011e-06, + "loss": 0.5358, + "step": 6607 + }, + { + "epoch": 0.2726747544771808, + "grad_norm": 3.3509051763038697, + "learning_rate": 2.560163902953574e-06, + "loss": 0.5503, + "step": 6608 + }, + { + "epoch": 0.27271601881653873, + "grad_norm": 7.4985634698243695, + "learning_rate": 2.560022069246957e-06, + "loss": 0.5402, + "step": 6609 + }, + { + "epoch": 0.2727572831558967, + "grad_norm": 2.350096721027273, + "learning_rate": 2.5598802166056947e-06, + "loss": 0.5506, + "step": 6610 + }, + { + "epoch": 0.2727985474952546, + "grad_norm": 2.761190215904765, + "learning_rate": 2.5597383450323197e-06, + "loss": 0.5503, + "step": 6611 + }, + { + "epoch": 0.27283981183461253, + "grad_norm": 23.95385061039814, + "learning_rate": 2.559596454529367e-06, + "loss": 0.5332, + "step": 6612 + }, + { + "epoch": 0.27288107617397045, + "grad_norm": 3.2744337379026986, + "learning_rate": 2.559454545099371e-06, + "loss": 0.5649, + "step": 6613 + }, + { + "epoch": 0.27292234051332837, + "grad_norm": 2.825742950756598, + "learning_rate": 2.559312616744866e-06, + "loss": 0.5655, + "step": 6614 + }, + { + "epoch": 0.2729636048526863, + "grad_norm": 3.379067626090938, + "learning_rate": 2.5591706694683887e-06, + "loss": 0.559, + "step": 6615 + }, + { + "epoch": 0.27300486919204425, + "grad_norm": 15.882690633929826, + "learning_rate": 2.5590287032724727e-06, + "loss": 0.583, + "step": 6616 + }, + { + "epoch": 0.27304613353140217, + "grad_norm": 2.553029025516127, + "learning_rate": 2.558886718159655e-06, + "loss": 0.5653, + "step": 6617 + }, + { + "epoch": 0.2730873978707601, + "grad_norm": 2.505833953254098, + "learning_rate": 2.558744714132472e-06, + "loss": 0.5691, + "step": 6618 + }, + { + "epoch": 0.273128662210118, + "grad_norm": 2.384227683636142, + "learning_rate": 2.5586026911934594e-06, + "loss": 0.537, + "step": 6619 + }, + { + "epoch": 0.2731699265494759, + "grad_norm": 3.2595198804113394, + "learning_rate": 2.5584606493451545e-06, + "loss": 0.5309, + "step": 6620 + }, + { + "epoch": 0.2732111908888339, + "grad_norm": 3.1419352143637593, + "learning_rate": 2.5583185885900946e-06, + "loss": 0.5705, + 
"step": 6621 + }, + { + "epoch": 0.2732524552281918, + "grad_norm": 3.506802048695202, + "learning_rate": 2.5581765089308175e-06, + "loss": 0.5024, + "step": 6622 + }, + { + "epoch": 0.2732937195675497, + "grad_norm": 2.893891076752809, + "learning_rate": 2.5580344103698603e-06, + "loss": 0.514, + "step": 6623 + }, + { + "epoch": 0.27333498390690764, + "grad_norm": 3.037045653954489, + "learning_rate": 2.557892292909762e-06, + "loss": 0.5113, + "step": 6624 + }, + { + "epoch": 0.27337624824626555, + "grad_norm": 2.902757775116735, + "learning_rate": 2.557750156553061e-06, + "loss": 0.5323, + "step": 6625 + }, + { + "epoch": 0.2734175125856235, + "grad_norm": 2.9924365251812337, + "learning_rate": 2.557608001302296e-06, + "loss": 0.5261, + "step": 6626 + }, + { + "epoch": 0.27345877692498144, + "grad_norm": 4.603763054156774, + "learning_rate": 2.5574658271600065e-06, + "loss": 0.527, + "step": 6627 + }, + { + "epoch": 0.27350004126433936, + "grad_norm": 4.245856845548406, + "learning_rate": 2.5573236341287317e-06, + "loss": 0.5545, + "step": 6628 + }, + { + "epoch": 0.27354130560369727, + "grad_norm": 6.523382754228572, + "learning_rate": 2.557181422211012e-06, + "loss": 0.572, + "step": 6629 + }, + { + "epoch": 0.2735825699430552, + "grad_norm": 3.9442408001505536, + "learning_rate": 2.5570391914093873e-06, + "loss": 0.5544, + "step": 6630 + }, + { + "epoch": 0.27362383428241316, + "grad_norm": 3.2730492323416316, + "learning_rate": 2.5568969417263984e-06, + "loss": 0.5707, + "step": 6631 + }, + { + "epoch": 0.2736650986217711, + "grad_norm": 5.607333165397671, + "learning_rate": 2.5567546731645857e-06, + "loss": 0.543, + "step": 6632 + }, + { + "epoch": 0.273706362961129, + "grad_norm": 2.4660448013842187, + "learning_rate": 2.556612385726492e-06, + "loss": 0.5336, + "step": 6633 + }, + { + "epoch": 0.2737476273004869, + "grad_norm": 9.15131447658822, + "learning_rate": 2.556470079414657e-06, + "loss": 0.579, + "step": 6634 + }, + { + "epoch": 0.2737888916398448, + "grad_norm": 3.044366842392887, + "learning_rate": 2.5563277542316238e-06, + "loss": 0.5508, + "step": 6635 + }, + { + "epoch": 0.2738301559792028, + "grad_norm": 7.755160022424827, + "learning_rate": 2.5561854101799344e-06, + "loss": 0.5446, + "step": 6636 + }, + { + "epoch": 0.2738714203185607, + "grad_norm": 2.341292821077137, + "learning_rate": 2.556043047262131e-06, + "loss": 0.5744, + "step": 6637 + }, + { + "epoch": 0.2739126846579186, + "grad_norm": 4.696465560956235, + "learning_rate": 2.5559006654807574e-06, + "loss": 0.5302, + "step": 6638 + }, + { + "epoch": 0.27395394899727654, + "grad_norm": 2.664979378591592, + "learning_rate": 2.5557582648383563e-06, + "loss": 0.5127, + "step": 6639 + }, + { + "epoch": 0.27399521333663446, + "grad_norm": 2.2979250567926273, + "learning_rate": 2.555615845337472e-06, + "loss": 0.5954, + "step": 6640 + }, + { + "epoch": 0.27403647767599243, + "grad_norm": 2.925209304499725, + "learning_rate": 2.555473406980647e-06, + "loss": 0.5167, + "step": 6641 + }, + { + "epoch": 0.27407774201535035, + "grad_norm": 2.3079481686643293, + "learning_rate": 2.555330949770427e-06, + "loss": 0.5405, + "step": 6642 + }, + { + "epoch": 0.27411900635470826, + "grad_norm": 5.64866493243998, + "learning_rate": 2.5551884737093563e-06, + "loss": 0.5406, + "step": 6643 + }, + { + "epoch": 0.2741602706940662, + "grad_norm": 4.080858961891911, + "learning_rate": 2.5550459787999797e-06, + "loss": 0.5822, + "step": 6644 + }, + { + "epoch": 0.2742015350334241, + "grad_norm": 4.086892686126311, + "learning_rate": 
2.554903465044843e-06, + "loss": 0.5578, + "step": 6645 + }, + { + "epoch": 0.27424279937278206, + "grad_norm": 3.613803028197115, + "learning_rate": 2.554760932446491e-06, + "loss": 0.5339, + "step": 6646 + }, + { + "epoch": 0.27428406371214, + "grad_norm": 2.788975366093601, + "learning_rate": 2.5546183810074697e-06, + "loss": 0.5613, + "step": 6647 + }, + { + "epoch": 0.2743253280514979, + "grad_norm": 2.3074239837379453, + "learning_rate": 2.554475810730327e-06, + "loss": 0.5571, + "step": 6648 + }, + { + "epoch": 0.2743665923908558, + "grad_norm": 5.062504820251363, + "learning_rate": 2.5543332216176073e-06, + "loss": 0.542, + "step": 6649 + }, + { + "epoch": 0.27440785673021373, + "grad_norm": 1.9234947828914795, + "learning_rate": 2.5541906136718595e-06, + "loss": 0.5424, + "step": 6650 + }, + { + "epoch": 0.2744491210695717, + "grad_norm": 3.4737168937708653, + "learning_rate": 2.5540479868956296e-06, + "loss": 0.5437, + "step": 6651 + }, + { + "epoch": 0.2744903854089296, + "grad_norm": 3.172980341816429, + "learning_rate": 2.553905341291466e-06, + "loss": 0.5752, + "step": 6652 + }, + { + "epoch": 0.27453164974828753, + "grad_norm": 2.496244222301469, + "learning_rate": 2.553762676861917e-06, + "loss": 0.5073, + "step": 6653 + }, + { + "epoch": 0.27457291408764545, + "grad_norm": 2.426941573415084, + "learning_rate": 2.5536199936095304e-06, + "loss": 0.5319, + "step": 6654 + }, + { + "epoch": 0.27461417842700336, + "grad_norm": 1.9020893912245325, + "learning_rate": 2.5534772915368547e-06, + "loss": 0.5762, + "step": 6655 + }, + { + "epoch": 0.27465544276636134, + "grad_norm": 14.096927495711839, + "learning_rate": 2.5533345706464386e-06, + "loss": 0.5144, + "step": 6656 + }, + { + "epoch": 0.27469670710571925, + "grad_norm": 2.6968068742029634, + "learning_rate": 2.5531918309408327e-06, + "loss": 0.5306, + "step": 6657 + }, + { + "epoch": 0.27473797144507717, + "grad_norm": 2.6731120555999133, + "learning_rate": 2.553049072422586e-06, + "loss": 0.5605, + "step": 6658 + }, + { + "epoch": 0.2747792357844351, + "grad_norm": 2.4955643060744053, + "learning_rate": 2.5529062950942487e-06, + "loss": 0.6039, + "step": 6659 + }, + { + "epoch": 0.274820500123793, + "grad_norm": 9.810043191648022, + "learning_rate": 2.5527634989583706e-06, + "loss": 0.578, + "step": 6660 + }, + { + "epoch": 0.27486176446315097, + "grad_norm": 6.109405301849962, + "learning_rate": 2.552620684017503e-06, + "loss": 0.5488, + "step": 6661 + }, + { + "epoch": 0.2749030288025089, + "grad_norm": 2.1855016276449084, + "learning_rate": 2.552477850274197e-06, + "loss": 0.6051, + "step": 6662 + }, + { + "epoch": 0.2749442931418668, + "grad_norm": 3.851082851701526, + "learning_rate": 2.5523349977310033e-06, + "loss": 0.5627, + "step": 6663 + }, + { + "epoch": 0.2749855574812247, + "grad_norm": 6.591273470040476, + "learning_rate": 2.552192126390474e-06, + "loss": 0.5437, + "step": 6664 + }, + { + "epoch": 0.27502682182058263, + "grad_norm": 5.076153920937602, + "learning_rate": 2.5520492362551612e-06, + "loss": 0.5984, + "step": 6665 + }, + { + "epoch": 0.2750680861599406, + "grad_norm": 3.361766178570864, + "learning_rate": 2.5519063273276174e-06, + "loss": 0.5346, + "step": 6666 + }, + { + "epoch": 0.2751093504992985, + "grad_norm": 2.3078874910875316, + "learning_rate": 2.551763399610395e-06, + "loss": 0.5451, + "step": 6667 + }, + { + "epoch": 0.27515061483865644, + "grad_norm": 4.076861409388344, + "learning_rate": 2.551620453106047e-06, + "loss": 0.496, + "step": 6668 + }, + { + "epoch": 0.27519187917801435, + 
"grad_norm": 8.66466695292742, + "learning_rate": 2.551477487817127e-06, + "loss": 0.5656, + "step": 6669 + }, + { + "epoch": 0.27523314351737227, + "grad_norm": 4.992458453960143, + "learning_rate": 2.551334503746189e-06, + "loss": 0.5145, + "step": 6670 + }, + { + "epoch": 0.27527440785673024, + "grad_norm": 1.9941375042976932, + "learning_rate": 2.5511915008957864e-06, + "loss": 0.5204, + "step": 6671 + }, + { + "epoch": 0.27531567219608816, + "grad_norm": 2.559692178265411, + "learning_rate": 2.551048479268474e-06, + "loss": 0.5447, + "step": 6672 + }, + { + "epoch": 0.2753569365354461, + "grad_norm": 57.41425689925508, + "learning_rate": 2.5509054388668065e-06, + "loss": 0.5251, + "step": 6673 + }, + { + "epoch": 0.275398200874804, + "grad_norm": 2.419562144705264, + "learning_rate": 2.550762379693339e-06, + "loss": 0.4996, + "step": 6674 + }, + { + "epoch": 0.2754394652141619, + "grad_norm": 3.2387676889617327, + "learning_rate": 2.550619301750627e-06, + "loss": 0.5923, + "step": 6675 + }, + { + "epoch": 0.2754807295535198, + "grad_norm": 3.1928170194821046, + "learning_rate": 2.550476205041225e-06, + "loss": 0.4667, + "step": 6676 + }, + { + "epoch": 0.2755219938928778, + "grad_norm": 3.751819888134986, + "learning_rate": 2.550333089567691e-06, + "loss": 0.5683, + "step": 6677 + }, + { + "epoch": 0.2755632582322357, + "grad_norm": 3.5054592145556884, + "learning_rate": 2.55018995533258e-06, + "loss": 0.5303, + "step": 6678 + }, + { + "epoch": 0.2756045225715936, + "grad_norm": 2.4086051849969783, + "learning_rate": 2.5500468023384497e-06, + "loss": 0.5357, + "step": 6679 + }, + { + "epoch": 0.27564578691095154, + "grad_norm": 3.6037006700613206, + "learning_rate": 2.5499036305878563e-06, + "loss": 0.4943, + "step": 6680 + }, + { + "epoch": 0.27568705125030946, + "grad_norm": 3.824000200348857, + "learning_rate": 2.549760440083358e-06, + "loss": 0.5237, + "step": 6681 + }, + { + "epoch": 0.2757283155896674, + "grad_norm": 2.4365141569508513, + "learning_rate": 2.549617230827512e-06, + "loss": 0.4758, + "step": 6682 + }, + { + "epoch": 0.27576957992902534, + "grad_norm": 3.1614995856326513, + "learning_rate": 2.5494740028228762e-06, + "loss": 0.5799, + "step": 6683 + }, + { + "epoch": 0.27581084426838326, + "grad_norm": 4.15027855181469, + "learning_rate": 2.5493307560720097e-06, + "loss": 0.5659, + "step": 6684 + }, + { + "epoch": 0.2758521086077412, + "grad_norm": 2.7888622546024995, + "learning_rate": 2.5491874905774705e-06, + "loss": 0.5394, + "step": 6685 + }, + { + "epoch": 0.2758933729470991, + "grad_norm": 1.7802621345846537, + "learning_rate": 2.5490442063418186e-06, + "loss": 0.5191, + "step": 6686 + }, + { + "epoch": 0.27593463728645706, + "grad_norm": 2.7177980792997714, + "learning_rate": 2.5489009033676126e-06, + "loss": 0.5588, + "step": 6687 + }, + { + "epoch": 0.275975901625815, + "grad_norm": 2.7833142263471493, + "learning_rate": 2.548757581657412e-06, + "loss": 0.5129, + "step": 6688 + }, + { + "epoch": 0.2760171659651729, + "grad_norm": 2.349405000492631, + "learning_rate": 2.548614241213778e-06, + "loss": 0.5458, + "step": 6689 + }, + { + "epoch": 0.2760584303045308, + "grad_norm": 4.466161944453445, + "learning_rate": 2.5484708820392704e-06, + "loss": 0.5185, + "step": 6690 + }, + { + "epoch": 0.2760996946438887, + "grad_norm": 4.6010393050312866, + "learning_rate": 2.54832750413645e-06, + "loss": 0.5194, + "step": 6691 + }, + { + "epoch": 0.2761409589832467, + "grad_norm": 4.158407723081223, + "learning_rate": 2.5481841075078776e-06, + "loss": 0.5477, + "step": 
6692 + }, + { + "epoch": 0.2761822233226046, + "grad_norm": 2.2034520436301985, + "learning_rate": 2.548040692156115e-06, + "loss": 0.5342, + "step": 6693 + }, + { + "epoch": 0.27622348766196253, + "grad_norm": 5.0686482695486195, + "learning_rate": 2.547897258083724e-06, + "loss": 0.5607, + "step": 6694 + }, + { + "epoch": 0.27626475200132045, + "grad_norm": 2.859258509457204, + "learning_rate": 2.5477538052932665e-06, + "loss": 0.6033, + "step": 6695 + }, + { + "epoch": 0.27630601634067836, + "grad_norm": 2.5414689662472583, + "learning_rate": 2.547610333787305e-06, + "loss": 0.5527, + "step": 6696 + }, + { + "epoch": 0.27634728068003633, + "grad_norm": 2.4036004069548365, + "learning_rate": 2.5474668435684017e-06, + "loss": 0.466, + "step": 6697 + }, + { + "epoch": 0.27638854501939425, + "grad_norm": 2.66139345588413, + "learning_rate": 2.547323334639121e-06, + "loss": 0.5496, + "step": 6698 + }, + { + "epoch": 0.27642980935875217, + "grad_norm": 4.379030866565856, + "learning_rate": 2.547179807002025e-06, + "loss": 0.5385, + "step": 6699 + }, + { + "epoch": 0.2764710736981101, + "grad_norm": 3.1848143759822656, + "learning_rate": 2.5470362606596783e-06, + "loss": 0.5433, + "step": 6700 + }, + { + "epoch": 0.276512338037468, + "grad_norm": 3.8040668925806376, + "learning_rate": 2.546892695614644e-06, + "loss": 0.5338, + "step": 6701 + }, + { + "epoch": 0.27655360237682597, + "grad_norm": 28.816714262138582, + "learning_rate": 2.546749111869488e-06, + "loss": 0.5415, + "step": 6702 + }, + { + "epoch": 0.2765948667161839, + "grad_norm": 1.9391853067430247, + "learning_rate": 2.546605509426774e-06, + "loss": 0.52, + "step": 6703 + }, + { + "epoch": 0.2766361310555418, + "grad_norm": 15.268623391333602, + "learning_rate": 2.5464618882890677e-06, + "loss": 0.5331, + "step": 6704 + }, + { + "epoch": 0.2766773953948997, + "grad_norm": 4.267631796100238, + "learning_rate": 2.546318248458934e-06, + "loss": 0.5289, + "step": 6705 + }, + { + "epoch": 0.27671865973425763, + "grad_norm": 2.7231600343017983, + "learning_rate": 2.546174589938939e-06, + "loss": 0.5693, + "step": 6706 + }, + { + "epoch": 0.2767599240736156, + "grad_norm": 2.087774794077687, + "learning_rate": 2.5460309127316486e-06, + "loss": 0.5588, + "step": 6707 + }, + { + "epoch": 0.2768011884129735, + "grad_norm": 4.623453795764911, + "learning_rate": 2.5458872168396293e-06, + "loss": 0.5808, + "step": 6708 + }, + { + "epoch": 0.27684245275233144, + "grad_norm": 6.8156715571237365, + "learning_rate": 2.545743502265448e-06, + "loss": 0.5768, + "step": 6709 + }, + { + "epoch": 0.27688371709168935, + "grad_norm": 5.94857610684072, + "learning_rate": 2.545599769011671e-06, + "loss": 0.5701, + "step": 6710 + }, + { + "epoch": 0.27692498143104727, + "grad_norm": 3.820446643453378, + "learning_rate": 2.5454560170808675e-06, + "loss": 0.5493, + "step": 6711 + }, + { + "epoch": 0.27696624577040524, + "grad_norm": 3.315173103579826, + "learning_rate": 2.5453122464756032e-06, + "loss": 0.5488, + "step": 6712 + }, + { + "epoch": 0.27700751010976316, + "grad_norm": 2.1090849930731417, + "learning_rate": 2.545168457198448e-06, + "loss": 0.4861, + "step": 6713 + }, + { + "epoch": 0.27704877444912107, + "grad_norm": 18.1516140214829, + "learning_rate": 2.5450246492519696e-06, + "loss": 0.5577, + "step": 6714 + }, + { + "epoch": 0.277090038788479, + "grad_norm": 3.5671541141328276, + "learning_rate": 2.544880822638736e-06, + "loss": 0.5871, + "step": 6715 + }, + { + "epoch": 0.2771313031278369, + "grad_norm": 3.545300617852325, + "learning_rate": 
2.5447369773613177e-06, + "loss": 0.542, + "step": 6716 + }, + { + "epoch": 0.2771725674671949, + "grad_norm": 4.164137426263828, + "learning_rate": 2.5445931134222833e-06, + "loss": 0.5068, + "step": 6717 + }, + { + "epoch": 0.2772138318065528, + "grad_norm": 2.6687553485739404, + "learning_rate": 2.5444492308242022e-06, + "loss": 0.5873, + "step": 6718 + }, + { + "epoch": 0.2772550961459107, + "grad_norm": 3.3459689006037294, + "learning_rate": 2.5443053295696454e-06, + "loss": 0.5092, + "step": 6719 + }, + { + "epoch": 0.2772963604852686, + "grad_norm": 5.042384448738477, + "learning_rate": 2.544161409661183e-06, + "loss": 0.5326, + "step": 6720 + }, + { + "epoch": 0.27733762482462654, + "grad_norm": 2.772434022917928, + "learning_rate": 2.544017471101386e-06, + "loss": 0.531, + "step": 6721 + }, + { + "epoch": 0.2773788891639845, + "grad_norm": 2.583819584744984, + "learning_rate": 2.5438735138928247e-06, + "loss": 0.5241, + "step": 6722 + }, + { + "epoch": 0.2774201535033424, + "grad_norm": 5.713600338837079, + "learning_rate": 2.5437295380380717e-06, + "loss": 0.539, + "step": 6723 + }, + { + "epoch": 0.27746141784270034, + "grad_norm": 1.9349214462685858, + "learning_rate": 2.5435855435396975e-06, + "loss": 0.5222, + "step": 6724 + }, + { + "epoch": 0.27750268218205826, + "grad_norm": 2.634159096420467, + "learning_rate": 2.5434415304002753e-06, + "loss": 0.6106, + "step": 6725 + }, + { + "epoch": 0.2775439465214162, + "grad_norm": 2.0930358099679123, + "learning_rate": 2.543297498622377e-06, + "loss": 0.6029, + "step": 6726 + }, + { + "epoch": 0.27758521086077415, + "grad_norm": 5.973665135673057, + "learning_rate": 2.5431534482085753e-06, + "loss": 0.5305, + "step": 6727 + }, + { + "epoch": 0.27762647520013206, + "grad_norm": 3.4538795662100306, + "learning_rate": 2.543009379161443e-06, + "loss": 0.5466, + "step": 6728 + }, + { + "epoch": 0.27766773953949, + "grad_norm": 2.422196752408176, + "learning_rate": 2.542865291483555e-06, + "loss": 0.5635, + "step": 6729 + }, + { + "epoch": 0.2777090038788479, + "grad_norm": 3.479982264911786, + "learning_rate": 2.5427211851774834e-06, + "loss": 0.521, + "step": 6730 + }, + { + "epoch": 0.2777502682182058, + "grad_norm": 12.818615953709234, + "learning_rate": 2.542577060245803e-06, + "loss": 0.5359, + "step": 6731 + }, + { + "epoch": 0.2777915325575638, + "grad_norm": 14.57283360961153, + "learning_rate": 2.5424329166910882e-06, + "loss": 0.5798, + "step": 6732 + }, + { + "epoch": 0.2778327968969217, + "grad_norm": 2.4667310496565618, + "learning_rate": 2.542288754515914e-06, + "loss": 0.5338, + "step": 6733 + }, + { + "epoch": 0.2778740612362796, + "grad_norm": 3.3488521341533133, + "learning_rate": 2.5421445737228545e-06, + "loss": 0.5644, + "step": 6734 + }, + { + "epoch": 0.27791532557563753, + "grad_norm": 9.52347592593978, + "learning_rate": 2.5420003743144867e-06, + "loss": 0.5679, + "step": 6735 + }, + { + "epoch": 0.27795658991499544, + "grad_norm": 5.597144055559447, + "learning_rate": 2.541856156293385e-06, + "loss": 0.6062, + "step": 6736 + }, + { + "epoch": 0.27799785425435336, + "grad_norm": 2.987149974207904, + "learning_rate": 2.5417119196621264e-06, + "loss": 0.5662, + "step": 6737 + }, + { + "epoch": 0.27803911859371133, + "grad_norm": 21.193871379628025, + "learning_rate": 2.5415676644232865e-06, + "loss": 0.5616, + "step": 6738 + }, + { + "epoch": 0.27808038293306925, + "grad_norm": 2.8140910984814793, + "learning_rate": 2.541423390579443e-06, + "loss": 0.5769, + "step": 6739 + }, + { + "epoch": 0.27812164727242716, + 
"grad_norm": 2.07895412076152, + "learning_rate": 2.541279098133172e-06, + "loss": 0.5617, + "step": 6740 + }, + { + "epoch": 0.2781629116117851, + "grad_norm": 2.815165691509318, + "learning_rate": 2.541134787087052e-06, + "loss": 0.573, + "step": 6741 + }, + { + "epoch": 0.278204175951143, + "grad_norm": 2.4740445458237743, + "learning_rate": 2.5409904574436597e-06, + "loss": 0.577, + "step": 6742 + }, + { + "epoch": 0.27824544029050097, + "grad_norm": 5.201701340203794, + "learning_rate": 2.540846109205574e-06, + "loss": 0.5643, + "step": 6743 + }, + { + "epoch": 0.2782867046298589, + "grad_norm": 6.511754820417304, + "learning_rate": 2.5407017423753727e-06, + "loss": 0.5006, + "step": 6744 + }, + { + "epoch": 0.2783279689692168, + "grad_norm": 3.5508720220036234, + "learning_rate": 2.540557356955635e-06, + "loss": 0.5185, + "step": 6745 + }, + { + "epoch": 0.2783692333085747, + "grad_norm": 2.172432072134388, + "learning_rate": 2.54041295294894e-06, + "loss": 0.5971, + "step": 6746 + }, + { + "epoch": 0.27841049764793263, + "grad_norm": 12.586195509720701, + "learning_rate": 2.5402685303578664e-06, + "loss": 0.5091, + "step": 6747 + }, + { + "epoch": 0.2784517619872906, + "grad_norm": 2.4937690575484774, + "learning_rate": 2.540124089184995e-06, + "loss": 0.5606, + "step": 6748 + }, + { + "epoch": 0.2784930263266485, + "grad_norm": 3.681586268339685, + "learning_rate": 2.5399796294329053e-06, + "loss": 0.5348, + "step": 6749 + }, + { + "epoch": 0.27853429066600643, + "grad_norm": 4.616961886548428, + "learning_rate": 2.5398351511041774e-06, + "loss": 0.5957, + "step": 6750 + }, + { + "epoch": 0.27857555500536435, + "grad_norm": 3.795850306027117, + "learning_rate": 2.5396906542013933e-06, + "loss": 0.5818, + "step": 6751 + }, + { + "epoch": 0.27861681934472227, + "grad_norm": 4.362500273258582, + "learning_rate": 2.5395461387271323e-06, + "loss": 0.5284, + "step": 6752 + }, + { + "epoch": 0.27865808368408024, + "grad_norm": 2.558505548266953, + "learning_rate": 2.539401604683977e-06, + "loss": 0.4872, + "step": 6753 + }, + { + "epoch": 0.27869934802343815, + "grad_norm": 3.952964367560089, + "learning_rate": 2.5392570520745097e-06, + "loss": 0.5334, + "step": 6754 + }, + { + "epoch": 0.27874061236279607, + "grad_norm": 2.7418257800208616, + "learning_rate": 2.5391124809013107e-06, + "loss": 0.557, + "step": 6755 + }, + { + "epoch": 0.278781876702154, + "grad_norm": 4.394224443494552, + "learning_rate": 2.538967891166963e-06, + "loss": 0.5188, + "step": 6756 + }, + { + "epoch": 0.2788231410415119, + "grad_norm": 5.226450705339999, + "learning_rate": 2.5388232828740505e-06, + "loss": 0.5921, + "step": 6757 + }, + { + "epoch": 0.2788644053808699, + "grad_norm": 2.3908047301700663, + "learning_rate": 2.5386786560251548e-06, + "loss": 0.5584, + "step": 6758 + }, + { + "epoch": 0.2789056697202278, + "grad_norm": 3.459337960826037, + "learning_rate": 2.5385340106228604e-06, + "loss": 0.5661, + "step": 6759 + }, + { + "epoch": 0.2789469340595857, + "grad_norm": 2.150512542429582, + "learning_rate": 2.5383893466697505e-06, + "loss": 0.557, + "step": 6760 + }, + { + "epoch": 0.2789881983989436, + "grad_norm": 3.561234996819663, + "learning_rate": 2.5382446641684087e-06, + "loss": 0.5284, + "step": 6761 + }, + { + "epoch": 0.27902946273830154, + "grad_norm": 2.786533398599554, + "learning_rate": 2.5380999631214203e-06, + "loss": 0.5631, + "step": 6762 + }, + { + "epoch": 0.2790707270776595, + "grad_norm": 3.3587350844160797, + "learning_rate": 2.5379552435313697e-06, + "loss": 0.5232, + "step": 
6763 + }, + { + "epoch": 0.2791119914170174, + "grad_norm": 3.204763621634502, + "learning_rate": 2.537810505400842e-06, + "loss": 0.5792, + "step": 6764 + }, + { + "epoch": 0.27915325575637534, + "grad_norm": 3.3681374931514347, + "learning_rate": 2.5376657487324223e-06, + "loss": 0.5084, + "step": 6765 + }, + { + "epoch": 0.27919452009573326, + "grad_norm": 10.817205627116259, + "learning_rate": 2.5375209735286964e-06, + "loss": 0.5194, + "step": 6766 + }, + { + "epoch": 0.27923578443509117, + "grad_norm": 2.0462519550571936, + "learning_rate": 2.5373761797922504e-06, + "loss": 0.5634, + "step": 6767 + }, + { + "epoch": 0.27927704877444914, + "grad_norm": 2.2660578423551203, + "learning_rate": 2.5372313675256708e-06, + "loss": 0.5292, + "step": 6768 + }, + { + "epoch": 0.27931831311380706, + "grad_norm": 4.756676212718302, + "learning_rate": 2.5370865367315443e-06, + "loss": 0.5378, + "step": 6769 + }, + { + "epoch": 0.279359577453165, + "grad_norm": 3.366562999867166, + "learning_rate": 2.536941687412458e-06, + "loss": 0.5941, + "step": 6770 + }, + { + "epoch": 0.2794008417925229, + "grad_norm": 3.2338021384910225, + "learning_rate": 2.5367968195709985e-06, + "loss": 0.5014, + "step": 6771 + }, + { + "epoch": 0.2794421061318808, + "grad_norm": 2.4159908976058824, + "learning_rate": 2.536651933209755e-06, + "loss": 0.5535, + "step": 6772 + }, + { + "epoch": 0.2794833704712388, + "grad_norm": 5.322680865666698, + "learning_rate": 2.5365070283313142e-06, + "loss": 0.5375, + "step": 6773 + }, + { + "epoch": 0.2795246348105967, + "grad_norm": 2.6671665428241567, + "learning_rate": 2.5363621049382645e-06, + "loss": 0.5453, + "step": 6774 + }, + { + "epoch": 0.2795658991499546, + "grad_norm": 2.249462966691787, + "learning_rate": 2.5362171630331957e-06, + "loss": 0.5526, + "step": 6775 + }, + { + "epoch": 0.2796071634893125, + "grad_norm": 3.5079902163948753, + "learning_rate": 2.5360722026186956e-06, + "loss": 0.5492, + "step": 6776 + }, + { + "epoch": 0.27964842782867044, + "grad_norm": 4.527726933456592, + "learning_rate": 2.5359272236973545e-06, + "loss": 0.5709, + "step": 6777 + }, + { + "epoch": 0.2796896921680284, + "grad_norm": 6.637953164570811, + "learning_rate": 2.5357822262717617e-06, + "loss": 0.5624, + "step": 6778 + }, + { + "epoch": 0.27973095650738633, + "grad_norm": 5.761698064019463, + "learning_rate": 2.5356372103445074e-06, + "loss": 0.5408, + "step": 6779 + }, + { + "epoch": 0.27977222084674425, + "grad_norm": 2.5597457263447114, + "learning_rate": 2.5354921759181814e-06, + "loss": 0.5064, + "step": 6780 + }, + { + "epoch": 0.27981348518610216, + "grad_norm": 3.974254462473301, + "learning_rate": 2.5353471229953752e-06, + "loss": 0.5295, + "step": 6781 + }, + { + "epoch": 0.2798547495254601, + "grad_norm": 2.9534712733793445, + "learning_rate": 2.5352020515786787e-06, + "loss": 0.5372, + "step": 6782 + }, + { + "epoch": 0.27989601386481805, + "grad_norm": 4.346659875424164, + "learning_rate": 2.5350569616706842e-06, + "loss": 0.5015, + "step": 6783 + }, + { + "epoch": 0.27993727820417597, + "grad_norm": 3.8285375512725146, + "learning_rate": 2.534911853273983e-06, + "loss": 0.5317, + "step": 6784 + }, + { + "epoch": 0.2799785425435339, + "grad_norm": 2.476038372754913, + "learning_rate": 2.5347667263911667e-06, + "loss": 0.6283, + "step": 6785 + }, + { + "epoch": 0.2800198068828918, + "grad_norm": 2.3234936601996377, + "learning_rate": 2.5346215810248287e-06, + "loss": 0.5805, + "step": 6786 + }, + { + "epoch": 0.2800610712222497, + "grad_norm": 3.321894876498337, + 
"learning_rate": 2.5344764171775613e-06, + "loss": 0.529, + "step": 6787 + }, + { + "epoch": 0.2801023355616077, + "grad_norm": 3.054169166476962, + "learning_rate": 2.5343312348519564e-06, + "loss": 0.5434, + "step": 6788 + }, + { + "epoch": 0.2801435999009656, + "grad_norm": 3.084642245795941, + "learning_rate": 2.5341860340506083e-06, + "loss": 0.5954, + "step": 6789 + }, + { + "epoch": 0.2801848642403235, + "grad_norm": 3.684812885860624, + "learning_rate": 2.534040814776111e-06, + "loss": 0.4891, + "step": 6790 + }, + { + "epoch": 0.28022612857968143, + "grad_norm": 3.0603583428236134, + "learning_rate": 2.5338955770310577e-06, + "loss": 0.5974, + "step": 6791 + }, + { + "epoch": 0.28026739291903935, + "grad_norm": 2.795225407018975, + "learning_rate": 2.5337503208180432e-06, + "loss": 0.5607, + "step": 6792 + }, + { + "epoch": 0.2803086572583973, + "grad_norm": 4.238096602533699, + "learning_rate": 2.5336050461396614e-06, + "loss": 0.5367, + "step": 6793 + }, + { + "epoch": 0.28034992159775524, + "grad_norm": 2.823589752456971, + "learning_rate": 2.533459752998508e-06, + "loss": 0.4952, + "step": 6794 + }, + { + "epoch": 0.28039118593711315, + "grad_norm": 3.106934275411522, + "learning_rate": 2.533314441397178e-06, + "loss": 0.5913, + "step": 6795 + }, + { + "epoch": 0.28043245027647107, + "grad_norm": 2.7399272342371828, + "learning_rate": 2.5331691113382677e-06, + "loss": 0.603, + "step": 6796 + }, + { + "epoch": 0.280473714615829, + "grad_norm": 3.432978288394488, + "learning_rate": 2.5330237628243716e-06, + "loss": 0.5178, + "step": 6797 + }, + { + "epoch": 0.2805149789551869, + "grad_norm": 2.2988044024298877, + "learning_rate": 2.5328783958580876e-06, + "loss": 0.5462, + "step": 6798 + }, + { + "epoch": 0.28055624329454487, + "grad_norm": 2.1957892604226856, + "learning_rate": 2.5327330104420114e-06, + "loss": 0.5069, + "step": 6799 + }, + { + "epoch": 0.2805975076339028, + "grad_norm": 4.837347002320394, + "learning_rate": 2.53258760657874e-06, + "loss": 0.6168, + "step": 6800 + }, + { + "epoch": 0.2806387719732607, + "grad_norm": 2.454068839393038, + "learning_rate": 2.532442184270871e-06, + "loss": 0.5622, + "step": 6801 + }, + { + "epoch": 0.2806800363126186, + "grad_norm": 46.38586779530386, + "learning_rate": 2.532296743521002e-06, + "loss": 0.5383, + "step": 6802 + }, + { + "epoch": 0.28072130065197654, + "grad_norm": 2.402730111728562, + "learning_rate": 2.5321512843317303e-06, + "loss": 0.5249, + "step": 6803 + }, + { + "epoch": 0.2807625649913345, + "grad_norm": 4.140376762299822, + "learning_rate": 2.5320058067056544e-06, + "loss": 0.5323, + "step": 6804 + }, + { + "epoch": 0.2808038293306924, + "grad_norm": 3.18035081215091, + "learning_rate": 2.5318603106453735e-06, + "loss": 0.5383, + "step": 6805 + }, + { + "epoch": 0.28084509367005034, + "grad_norm": 2.9096425388845066, + "learning_rate": 2.531714796153486e-06, + "loss": 0.5488, + "step": 6806 + }, + { + "epoch": 0.28088635800940825, + "grad_norm": 9.170996621464557, + "learning_rate": 2.531569263232592e-06, + "loss": 0.5573, + "step": 6807 + }, + { + "epoch": 0.28092762234876617, + "grad_norm": 6.1508186795286734, + "learning_rate": 2.5314237118852894e-06, + "loss": 0.5754, + "step": 6808 + }, + { + "epoch": 0.28096888668812414, + "grad_norm": 2.5958394511425245, + "learning_rate": 2.531278142114179e-06, + "loss": 0.5652, + "step": 6809 + }, + { + "epoch": 0.28101015102748206, + "grad_norm": 3.0009641884472953, + "learning_rate": 2.531132553921862e-06, + "loss": 0.479, + "step": 6810 + }, + { + "epoch": 
0.28105141536684, + "grad_norm": 2.7367212397167715, + "learning_rate": 2.5309869473109375e-06, + "loss": 0.5228, + "step": 6811 + }, + { + "epoch": 0.2810926797061979, + "grad_norm": 2.853743119726044, + "learning_rate": 2.5308413222840073e-06, + "loss": 0.5197, + "step": 6812 + }, + { + "epoch": 0.2811339440455558, + "grad_norm": 10.354334451437186, + "learning_rate": 2.530695678843672e-06, + "loss": 0.6102, + "step": 6813 + }, + { + "epoch": 0.2811752083849138, + "grad_norm": 2.480516476878582, + "learning_rate": 2.5305500169925336e-06, + "loss": 0.5135, + "step": 6814 + }, + { + "epoch": 0.2812164727242717, + "grad_norm": 6.651414141521766, + "learning_rate": 2.530404336733194e-06, + "loss": 0.5367, + "step": 6815 + }, + { + "epoch": 0.2812577370636296, + "grad_norm": 3.503579762531835, + "learning_rate": 2.530258638068255e-06, + "loss": 0.5476, + "step": 6816 + }, + { + "epoch": 0.2812990014029875, + "grad_norm": 32.13097214972112, + "learning_rate": 2.5301129210003196e-06, + "loss": 0.5426, + "step": 6817 + }, + { + "epoch": 0.28134026574234544, + "grad_norm": 3.738749149445462, + "learning_rate": 2.5299671855319903e-06, + "loss": 0.5279, + "step": 6818 + }, + { + "epoch": 0.2813815300817034, + "grad_norm": 3.3555186986300924, + "learning_rate": 2.5298214316658706e-06, + "loss": 0.5512, + "step": 6819 + }, + { + "epoch": 0.28142279442106133, + "grad_norm": 4.6967603787842895, + "learning_rate": 2.529675659404564e-06, + "loss": 0.5708, + "step": 6820 + }, + { + "epoch": 0.28146405876041924, + "grad_norm": 1.9643071836297024, + "learning_rate": 2.529529868750674e-06, + "loss": 0.4916, + "step": 6821 + }, + { + "epoch": 0.28150532309977716, + "grad_norm": 2.901277284797318, + "learning_rate": 2.529384059706805e-06, + "loss": 0.5194, + "step": 6822 + }, + { + "epoch": 0.2815465874391351, + "grad_norm": 2.6357003403535786, + "learning_rate": 2.529238232275562e-06, + "loss": 0.5325, + "step": 6823 + }, + { + "epoch": 0.28158785177849305, + "grad_norm": 2.5603307597581195, + "learning_rate": 2.529092386459549e-06, + "loss": 0.5306, + "step": 6824 + }, + { + "epoch": 0.28162911611785096, + "grad_norm": 3.2841221005111687, + "learning_rate": 2.5289465222613724e-06, + "loss": 0.5286, + "step": 6825 + }, + { + "epoch": 0.2816703804572089, + "grad_norm": 2.497434835983409, + "learning_rate": 2.528800639683636e-06, + "loss": 0.5204, + "step": 6826 + }, + { + "epoch": 0.2817116447965668, + "grad_norm": 4.354007767137641, + "learning_rate": 2.5286547387289468e-06, + "loss": 0.5585, + "step": 6827 + }, + { + "epoch": 0.2817529091359247, + "grad_norm": 2.1827762341947508, + "learning_rate": 2.528508819399911e-06, + "loss": 0.5172, + "step": 6828 + }, + { + "epoch": 0.2817941734752827, + "grad_norm": 2.0640973195410703, + "learning_rate": 2.5283628816991344e-06, + "loss": 0.5388, + "step": 6829 + }, + { + "epoch": 0.2818354378146406, + "grad_norm": 3.194518647400452, + "learning_rate": 2.528216925629224e-06, + "loss": 0.519, + "step": 6830 + }, + { + "epoch": 0.2818767021539985, + "grad_norm": 3.1520632996574505, + "learning_rate": 2.528070951192787e-06, + "loss": 0.5023, + "step": 6831 + }, + { + "epoch": 0.28191796649335643, + "grad_norm": 2.586719850718664, + "learning_rate": 2.5279249583924317e-06, + "loss": 0.493, + "step": 6832 + }, + { + "epoch": 0.28195923083271435, + "grad_norm": 9.805848548300892, + "learning_rate": 2.527778947230765e-06, + "loss": 0.5869, + "step": 6833 + }, + { + "epoch": 0.2820004951720723, + "grad_norm": 12.534110099605902, + "learning_rate": 2.5276329177103943e-06, + 
"loss": 0.5275, + "step": 6834 + }, + { + "epoch": 0.28204175951143023, + "grad_norm": 19.779014259279073, + "learning_rate": 2.52748686983393e-06, + "loss": 0.5501, + "step": 6835 + }, + { + "epoch": 0.28208302385078815, + "grad_norm": 2.5986252918375317, + "learning_rate": 2.527340803603979e-06, + "loss": 0.5326, + "step": 6836 + }, + { + "epoch": 0.28212428819014607, + "grad_norm": 4.1985189105357765, + "learning_rate": 2.5271947190231517e-06, + "loss": 0.6011, + "step": 6837 + }, + { + "epoch": 0.282165552529504, + "grad_norm": 2.496874824270655, + "learning_rate": 2.527048616094057e-06, + "loss": 0.5852, + "step": 6838 + }, + { + "epoch": 0.28220681686886195, + "grad_norm": 3.629203645638725, + "learning_rate": 2.5269024948193047e-06, + "loss": 0.5553, + "step": 6839 + }, + { + "epoch": 0.28224808120821987, + "grad_norm": 2.294964374191287, + "learning_rate": 2.5267563552015048e-06, + "loss": 0.5842, + "step": 6840 + }, + { + "epoch": 0.2822893455475778, + "grad_norm": 3.100536138831057, + "learning_rate": 2.526610197243268e-06, + "loss": 0.5083, + "step": 6841 + }, + { + "epoch": 0.2823306098869357, + "grad_norm": 7.248401877602076, + "learning_rate": 2.5264640209472054e-06, + "loss": 0.5404, + "step": 6842 + }, + { + "epoch": 0.2823718742262936, + "grad_norm": 5.002680820327735, + "learning_rate": 2.5263178263159267e-06, + "loss": 0.5782, + "step": 6843 + }, + { + "epoch": 0.2824131385656516, + "grad_norm": 2.147501435609967, + "learning_rate": 2.5261716133520446e-06, + "loss": 0.5092, + "step": 6844 + }, + { + "epoch": 0.2824544029050095, + "grad_norm": 2.5118766872979044, + "learning_rate": 2.5260253820581704e-06, + "loss": 0.5481, + "step": 6845 + }, + { + "epoch": 0.2824956672443674, + "grad_norm": 4.231876996198968, + "learning_rate": 2.525879132436916e-06, + "loss": 0.5674, + "step": 6846 + }, + { + "epoch": 0.28253693158372534, + "grad_norm": 2.086337200099307, + "learning_rate": 2.525732864490894e-06, + "loss": 0.5323, + "step": 6847 + }, + { + "epoch": 0.28257819592308325, + "grad_norm": 2.1587893246618677, + "learning_rate": 2.525586578222717e-06, + "loss": 0.5476, + "step": 6848 + }, + { + "epoch": 0.2826194602624412, + "grad_norm": 6.763564157006132, + "learning_rate": 2.525440273634998e-06, + "loss": 0.6069, + "step": 6849 + }, + { + "epoch": 0.28266072460179914, + "grad_norm": 7.157296063217537, + "learning_rate": 2.525293950730351e-06, + "loss": 0.5225, + "step": 6850 + }, + { + "epoch": 0.28270198894115706, + "grad_norm": 3.9018581217076416, + "learning_rate": 2.5251476095113885e-06, + "loss": 0.6505, + "step": 6851 + }, + { + "epoch": 0.282743253280515, + "grad_norm": 2.330594749621984, + "learning_rate": 2.5250012499807254e-06, + "loss": 0.5394, + "step": 6852 + }, + { + "epoch": 0.2827845176198729, + "grad_norm": 6.020924106015543, + "learning_rate": 2.5248548721409763e-06, + "loss": 0.5609, + "step": 6853 + }, + { + "epoch": 0.28282578195923086, + "grad_norm": 1.9822407079589117, + "learning_rate": 2.5247084759947543e-06, + "loss": 0.5429, + "step": 6854 + }, + { + "epoch": 0.2828670462985888, + "grad_norm": 4.62161724362881, + "learning_rate": 2.5245620615446763e-06, + "loss": 0.5386, + "step": 6855 + }, + { + "epoch": 0.2829083106379467, + "grad_norm": 5.712751218413484, + "learning_rate": 2.5244156287933568e-06, + "loss": 0.5575, + "step": 6856 + }, + { + "epoch": 0.2829495749773046, + "grad_norm": 2.4340200127812612, + "learning_rate": 2.524269177743411e-06, + "loss": 0.5996, + "step": 6857 + }, + { + "epoch": 0.2829908393166625, + "grad_norm": 
11.040534012557323, + "learning_rate": 2.524122708397456e-06, + "loss": 0.5197, + "step": 6858 + }, + { + "epoch": 0.28303210365602044, + "grad_norm": 14.596236579101362, + "learning_rate": 2.523976220758107e-06, + "loss": 0.5136, + "step": 6859 + }, + { + "epoch": 0.2830733679953784, + "grad_norm": 4.007888204398318, + "learning_rate": 2.5238297148279814e-06, + "loss": 0.5844, + "step": 6860 + }, + { + "epoch": 0.2831146323347363, + "grad_norm": 2.557352340756402, + "learning_rate": 2.5236831906096955e-06, + "loss": 0.5389, + "step": 6861 + }, + { + "epoch": 0.28315589667409424, + "grad_norm": 2.3906626228701, + "learning_rate": 2.5235366481058673e-06, + "loss": 0.5209, + "step": 6862 + }, + { + "epoch": 0.28319716101345216, + "grad_norm": 8.56943963507252, + "learning_rate": 2.5233900873191137e-06, + "loss": 0.5082, + "step": 6863 + }, + { + "epoch": 0.2832384253528101, + "grad_norm": 3.9439346574080574, + "learning_rate": 2.523243508252053e-06, + "loss": 0.465, + "step": 6864 + }, + { + "epoch": 0.28327968969216805, + "grad_norm": 4.309147452394074, + "learning_rate": 2.5230969109073045e-06, + "loss": 0.5778, + "step": 6865 + }, + { + "epoch": 0.28332095403152596, + "grad_norm": 2.595154522265607, + "learning_rate": 2.5229502952874847e-06, + "loss": 0.498, + "step": 6866 + }, + { + "epoch": 0.2833622183708839, + "grad_norm": 4.937084869179892, + "learning_rate": 2.5228036613952136e-06, + "loss": 0.5606, + "step": 6867 + }, + { + "epoch": 0.2834034827102418, + "grad_norm": 5.430033254345743, + "learning_rate": 2.522657009233111e-06, + "loss": 0.5026, + "step": 6868 + }, + { + "epoch": 0.2834447470495997, + "grad_norm": 2.6722115863966143, + "learning_rate": 2.5225103388037953e-06, + "loss": 0.5664, + "step": 6869 + }, + { + "epoch": 0.2834860113889577, + "grad_norm": 2.9913746108987063, + "learning_rate": 2.5223636501098875e-06, + "loss": 0.4817, + "step": 6870 + }, + { + "epoch": 0.2835272757283156, + "grad_norm": 2.5908764986498105, + "learning_rate": 2.522216943154007e-06, + "loss": 0.5515, + "step": 6871 + }, + { + "epoch": 0.2835685400676735, + "grad_norm": 3.704780987427893, + "learning_rate": 2.5220702179387753e-06, + "loss": 0.5728, + "step": 6872 + }, + { + "epoch": 0.28360980440703143, + "grad_norm": 4.729633613729754, + "learning_rate": 2.5219234744668123e-06, + "loss": 0.5365, + "step": 6873 + }, + { + "epoch": 0.28365106874638935, + "grad_norm": 3.026137267464538, + "learning_rate": 2.521776712740739e-06, + "loss": 0.534, + "step": 6874 + }, + { + "epoch": 0.2836923330857473, + "grad_norm": 3.1406222169083104, + "learning_rate": 2.5216299327631787e-06, + "loss": 0.5425, + "step": 6875 + }, + { + "epoch": 0.28373359742510523, + "grad_norm": 4.2594049357422445, + "learning_rate": 2.521483134536751e-06, + "loss": 0.567, + "step": 6876 + }, + { + "epoch": 0.28377486176446315, + "grad_norm": 11.357113331022358, + "learning_rate": 2.5213363180640795e-06, + "loss": 0.5189, + "step": 6877 + }, + { + "epoch": 0.28381612610382106, + "grad_norm": 2.5038502483532077, + "learning_rate": 2.5211894833477872e-06, + "loss": 0.5474, + "step": 6878 + }, + { + "epoch": 0.283857390443179, + "grad_norm": 3.2667238853636635, + "learning_rate": 2.5210426303904953e-06, + "loss": 0.5026, + "step": 6879 + }, + { + "epoch": 0.28389865478253695, + "grad_norm": 2.925116869917728, + "learning_rate": 2.520895759194828e-06, + "loss": 0.5634, + "step": 6880 + }, + { + "epoch": 0.28393991912189487, + "grad_norm": 3.9738584807995085, + "learning_rate": 2.5207488697634083e-06, + "loss": 0.5482, + "step": 6881 + 
}, + { + "epoch": 0.2839811834612528, + "grad_norm": 2.4223139397794995, + "learning_rate": 2.5206019620988604e-06, + "loss": 0.5099, + "step": 6882 + }, + { + "epoch": 0.2840224478006107, + "grad_norm": 3.135498919902313, + "learning_rate": 2.5204550362038078e-06, + "loss": 0.562, + "step": 6883 + }, + { + "epoch": 0.2840637121399686, + "grad_norm": 3.0124698001118535, + "learning_rate": 2.5203080920808764e-06, + "loss": 0.5195, + "step": 6884 + }, + { + "epoch": 0.2841049764793266, + "grad_norm": 3.834791102036933, + "learning_rate": 2.5201611297326894e-06, + "loss": 0.4985, + "step": 6885 + }, + { + "epoch": 0.2841462408186845, + "grad_norm": 1.9760973539300803, + "learning_rate": 2.5200141491618726e-06, + "loss": 0.4914, + "step": 6886 + }, + { + "epoch": 0.2841875051580424, + "grad_norm": 2.3608413353893196, + "learning_rate": 2.5198671503710515e-06, + "loss": 0.558, + "step": 6887 + }, + { + "epoch": 0.28422876949740034, + "grad_norm": 8.124929139444077, + "learning_rate": 2.519720133362852e-06, + "loss": 0.562, + "step": 6888 + }, + { + "epoch": 0.28427003383675825, + "grad_norm": 6.083951780242758, + "learning_rate": 2.5195730981399e-06, + "loss": 0.5779, + "step": 6889 + }, + { + "epoch": 0.2843112981761162, + "grad_norm": 3.407045972019597, + "learning_rate": 2.5194260447048216e-06, + "loss": 0.5612, + "step": 6890 + }, + { + "epoch": 0.28435256251547414, + "grad_norm": 1.80155154792502, + "learning_rate": 2.5192789730602435e-06, + "loss": 0.5839, + "step": 6891 + }, + { + "epoch": 0.28439382685483205, + "grad_norm": 5.5328372604330465, + "learning_rate": 2.519131883208794e-06, + "loss": 0.4923, + "step": 6892 + }, + { + "epoch": 0.28443509119418997, + "grad_norm": 4.990374273726269, + "learning_rate": 2.5189847751530994e-06, + "loss": 0.5277, + "step": 6893 + }, + { + "epoch": 0.2844763555335479, + "grad_norm": 3.913758211276409, + "learning_rate": 2.5188376488957866e-06, + "loss": 0.5487, + "step": 6894 + }, + { + "epoch": 0.28451761987290586, + "grad_norm": 3.110733352201925, + "learning_rate": 2.5186905044394856e-06, + "loss": 0.5027, + "step": 6895 + }, + { + "epoch": 0.2845588842122638, + "grad_norm": 3.1920815959724487, + "learning_rate": 2.518543341786824e-06, + "loss": 0.5067, + "step": 6896 + }, + { + "epoch": 0.2846001485516217, + "grad_norm": 6.047502000138758, + "learning_rate": 2.5183961609404297e-06, + "loss": 0.5537, + "step": 6897 + }, + { + "epoch": 0.2846414128909796, + "grad_norm": 2.4094934118834157, + "learning_rate": 2.5182489619029324e-06, + "loss": 0.4963, + "step": 6898 + }, + { + "epoch": 0.2846826772303375, + "grad_norm": 2.4340988401066346, + "learning_rate": 2.5181017446769615e-06, + "loss": 0.5255, + "step": 6899 + }, + { + "epoch": 0.2847239415696955, + "grad_norm": 7.314493004004031, + "learning_rate": 2.5179545092651466e-06, + "loss": 0.5006, + "step": 6900 + }, + { + "epoch": 0.2847652059090534, + "grad_norm": 3.0422029402918747, + "learning_rate": 2.5178072556701174e-06, + "loss": 0.5254, + "step": 6901 + }, + { + "epoch": 0.2848064702484113, + "grad_norm": 15.451178015477684, + "learning_rate": 2.5176599838945047e-06, + "loss": 0.4853, + "step": 6902 + }, + { + "epoch": 0.28484773458776924, + "grad_norm": 16.107739740011613, + "learning_rate": 2.517512693940939e-06, + "loss": 0.5188, + "step": 6903 + }, + { + "epoch": 0.28488899892712716, + "grad_norm": 4.3878948698375595, + "learning_rate": 2.517365385812051e-06, + "loss": 0.527, + "step": 6904 + }, + { + "epoch": 0.28493026326648513, + "grad_norm": 2.8816997826308044, + "learning_rate": 
2.5172180595104722e-06, + "loss": 0.5371, + "step": 6905 + }, + { + "epoch": 0.28497152760584304, + "grad_norm": 2.5316466349299667, + "learning_rate": 2.517070715038834e-06, + "loss": 0.4804, + "step": 6906 + }, + { + "epoch": 0.28501279194520096, + "grad_norm": 3.087902339963887, + "learning_rate": 2.5169233523997686e-06, + "loss": 0.5958, + "step": 6907 + }, + { + "epoch": 0.2850540562845589, + "grad_norm": 2.9982771030536206, + "learning_rate": 2.5167759715959084e-06, + "loss": 0.5311, + "step": 6908 + }, + { + "epoch": 0.2850953206239168, + "grad_norm": 4.687483916332704, + "learning_rate": 2.516628572629885e-06, + "loss": 0.5803, + "step": 6909 + }, + { + "epoch": 0.28513658496327476, + "grad_norm": 7.533099547757769, + "learning_rate": 2.5164811555043326e-06, + "loss": 0.523, + "step": 6910 + }, + { + "epoch": 0.2851778493026327, + "grad_norm": 2.577604507892814, + "learning_rate": 2.516333720221884e-06, + "loss": 0.4786, + "step": 6911 + }, + { + "epoch": 0.2852191136419906, + "grad_norm": 5.457157058530745, + "learning_rate": 2.5161862667851725e-06, + "loss": 0.5361, + "step": 6912 + }, + { + "epoch": 0.2852603779813485, + "grad_norm": 2.6186229262377543, + "learning_rate": 2.5160387951968326e-06, + "loss": 0.5411, + "step": 6913 + }, + { + "epoch": 0.28530164232070643, + "grad_norm": 7.9930622676165175, + "learning_rate": 2.515891305459498e-06, + "loss": 0.5514, + "step": 6914 + }, + { + "epoch": 0.2853429066600644, + "grad_norm": 2.5721925424560785, + "learning_rate": 2.5157437975758027e-06, + "loss": 0.4903, + "step": 6915 + }, + { + "epoch": 0.2853841709994223, + "grad_norm": 3.424594956522648, + "learning_rate": 2.5155962715483825e-06, + "loss": 0.5489, + "step": 6916 + }, + { + "epoch": 0.28542543533878023, + "grad_norm": 3.0077717657701135, + "learning_rate": 2.5154487273798726e-06, + "loss": 0.6187, + "step": 6917 + }, + { + "epoch": 0.28546669967813815, + "grad_norm": 4.831742099780003, + "learning_rate": 2.5153011650729077e-06, + "loss": 0.4357, + "step": 6918 + }, + { + "epoch": 0.28550796401749606, + "grad_norm": 3.8423759215563273, + "learning_rate": 2.5151535846301246e-06, + "loss": 0.57, + "step": 6919 + }, + { + "epoch": 0.285549228356854, + "grad_norm": 1.8309909896778327, + "learning_rate": 2.5150059860541584e-06, + "loss": 0.4829, + "step": 6920 + }, + { + "epoch": 0.28559049269621195, + "grad_norm": 2.564591253211153, + "learning_rate": 2.514858369347646e-06, + "loss": 0.599, + "step": 6921 + }, + { + "epoch": 0.28563175703556987, + "grad_norm": 2.3048805505806347, + "learning_rate": 2.514710734513225e-06, + "loss": 0.577, + "step": 6922 + }, + { + "epoch": 0.2856730213749278, + "grad_norm": 3.9760224833467475, + "learning_rate": 2.5145630815535316e-06, + "loss": 0.5698, + "step": 6923 + }, + { + "epoch": 0.2857142857142857, + "grad_norm": 4.642237633774693, + "learning_rate": 2.5144154104712034e-06, + "loss": 0.5583, + "step": 6924 + }, + { + "epoch": 0.2857555500536436, + "grad_norm": 4.043052397487334, + "learning_rate": 2.5142677212688782e-06, + "loss": 0.5718, + "step": 6925 + }, + { + "epoch": 0.2857968143930016, + "grad_norm": 2.3327439651840476, + "learning_rate": 2.5141200139491943e-06, + "loss": 0.5365, + "step": 6926 + }, + { + "epoch": 0.2858380787323595, + "grad_norm": 3.8558536719924805, + "learning_rate": 2.51397228851479e-06, + "loss": 0.5362, + "step": 6927 + }, + { + "epoch": 0.2858793430717174, + "grad_norm": 2.213526757444367, + "learning_rate": 2.513824544968304e-06, + "loss": 0.4994, + "step": 6928 + }, + { + "epoch": 0.28592060741107533, 
+ "grad_norm": 15.049134543586197, + "learning_rate": 2.513676783312376e-06, + "loss": 0.5457, + "step": 6929 + }, + { + "epoch": 0.28596187175043325, + "grad_norm": 3.092030718788539, + "learning_rate": 2.5135290035496444e-06, + "loss": 0.59, + "step": 6930 + }, + { + "epoch": 0.2860031360897912, + "grad_norm": 7.295060333981075, + "learning_rate": 2.513381205682749e-06, + "loss": 0.564, + "step": 6931 + }, + { + "epoch": 0.28604440042914914, + "grad_norm": 4.064924652174539, + "learning_rate": 2.5132333897143306e-06, + "loss": 0.5469, + "step": 6932 + }, + { + "epoch": 0.28608566476850705, + "grad_norm": 5.026210265868383, + "learning_rate": 2.513085555647029e-06, + "loss": 0.5634, + "step": 6933 + }, + { + "epoch": 0.28612692910786497, + "grad_norm": 2.4106985992772683, + "learning_rate": 2.512937703483485e-06, + "loss": 0.5381, + "step": 6934 + }, + { + "epoch": 0.2861681934472229, + "grad_norm": 2.0552749213408483, + "learning_rate": 2.5127898332263397e-06, + "loss": 0.5268, + "step": 6935 + }, + { + "epoch": 0.28620945778658086, + "grad_norm": 5.2975563741145, + "learning_rate": 2.5126419448782347e-06, + "loss": 0.5251, + "step": 6936 + }, + { + "epoch": 0.2862507221259388, + "grad_norm": 3.122953597670944, + "learning_rate": 2.5124940384418105e-06, + "loss": 0.6115, + "step": 6937 + }, + { + "epoch": 0.2862919864652967, + "grad_norm": 3.5499328146351643, + "learning_rate": 2.5123461139197104e-06, + "loss": 0.587, + "step": 6938 + }, + { + "epoch": 0.2863332508046546, + "grad_norm": 2.225313534783191, + "learning_rate": 2.5121981713145763e-06, + "loss": 0.6058, + "step": 6939 + }, + { + "epoch": 0.2863745151440125, + "grad_norm": 2.658141515763437, + "learning_rate": 2.5120502106290504e-06, + "loss": 0.592, + "step": 6940 + }, + { + "epoch": 0.2864157794833705, + "grad_norm": 3.382411174637627, + "learning_rate": 2.511902231865776e-06, + "loss": 0.5573, + "step": 6941 + }, + { + "epoch": 0.2864570438227284, + "grad_norm": 20.56756341475476, + "learning_rate": 2.511754235027397e-06, + "loss": 0.5171, + "step": 6942 + }, + { + "epoch": 0.2864983081620863, + "grad_norm": 2.024579365881667, + "learning_rate": 2.511606220116556e-06, + "loss": 0.5332, + "step": 6943 + }, + { + "epoch": 0.28653957250144424, + "grad_norm": 5.805187837453494, + "learning_rate": 2.5114581871358968e-06, + "loss": 0.5297, + "step": 6944 + }, + { + "epoch": 0.28658083684080216, + "grad_norm": 2.8174045089049606, + "learning_rate": 2.5113101360880647e-06, + "loss": 0.5744, + "step": 6945 + }, + { + "epoch": 0.2866221011801601, + "grad_norm": 3.723659537294459, + "learning_rate": 2.5111620669757028e-06, + "loss": 0.4789, + "step": 6946 + }, + { + "epoch": 0.28666336551951804, + "grad_norm": 3.0686354133911706, + "learning_rate": 2.5110139798014577e-06, + "loss": 0.5408, + "step": 6947 + }, + { + "epoch": 0.28670462985887596, + "grad_norm": 5.830645642969896, + "learning_rate": 2.5108658745679733e-06, + "loss": 0.5716, + "step": 6948 + }, + { + "epoch": 0.2867458941982339, + "grad_norm": 13.06154312703317, + "learning_rate": 2.5107177512778955e-06, + "loss": 0.5325, + "step": 6949 + }, + { + "epoch": 0.2867871585375918, + "grad_norm": 4.990701290590635, + "learning_rate": 2.510569609933871e-06, + "loss": 0.4734, + "step": 6950 + }, + { + "epoch": 0.28682842287694976, + "grad_norm": 7.641438943684229, + "learning_rate": 2.5104214505385446e-06, + "loss": 0.4852, + "step": 6951 + }, + { + "epoch": 0.2868696872163077, + "grad_norm": 3.179413871181599, + "learning_rate": 2.5102732730945635e-06, + "loss": 0.5452, + 
"step": 6952 + }, + { + "epoch": 0.2869109515556656, + "grad_norm": 2.8431045018771877, + "learning_rate": 2.510125077604574e-06, + "loss": 0.5702, + "step": 6953 + }, + { + "epoch": 0.2869522158950235, + "grad_norm": 3.7586268529748446, + "learning_rate": 2.5099768640712245e-06, + "loss": 0.5408, + "step": 6954 + }, + { + "epoch": 0.2869934802343814, + "grad_norm": 3.2570721780195133, + "learning_rate": 2.5098286324971615e-06, + "loss": 0.5018, + "step": 6955 + }, + { + "epoch": 0.2870347445737394, + "grad_norm": 3.376365208374224, + "learning_rate": 2.509680382885032e-06, + "loss": 0.5428, + "step": 6956 + }, + { + "epoch": 0.2870760089130973, + "grad_norm": 4.9949445050370205, + "learning_rate": 2.509532115237486e-06, + "loss": 0.5127, + "step": 6957 + }, + { + "epoch": 0.28711727325245523, + "grad_norm": 2.3648631921898144, + "learning_rate": 2.509383829557171e-06, + "loss": 0.5489, + "step": 6958 + }, + { + "epoch": 0.28715853759181315, + "grad_norm": 4.013441561776908, + "learning_rate": 2.5092355258467353e-06, + "loss": 0.5572, + "step": 6959 + }, + { + "epoch": 0.28719980193117106, + "grad_norm": 2.56268834853794, + "learning_rate": 2.509087204108828e-06, + "loss": 0.5472, + "step": 6960 + }, + { + "epoch": 0.28724106627052903, + "grad_norm": 3.6008342567071754, + "learning_rate": 2.5089388643460995e-06, + "loss": 0.5067, + "step": 6961 + }, + { + "epoch": 0.28728233060988695, + "grad_norm": 3.7050458193944342, + "learning_rate": 2.5087905065611984e-06, + "loss": 0.5421, + "step": 6962 + }, + { + "epoch": 0.28732359494924486, + "grad_norm": 2.374259128412532, + "learning_rate": 2.508642130756776e-06, + "loss": 0.5228, + "step": 6963 + }, + { + "epoch": 0.2873648592886028, + "grad_norm": 3.730824027443866, + "learning_rate": 2.5084937369354813e-06, + "loss": 0.6101, + "step": 6964 + }, + { + "epoch": 0.2874061236279607, + "grad_norm": 3.984677247420699, + "learning_rate": 2.5083453250999653e-06, + "loss": 0.5686, + "step": 6965 + }, + { + "epoch": 0.28744738796731867, + "grad_norm": 2.3584834972770263, + "learning_rate": 2.5081968952528802e-06, + "loss": 0.5837, + "step": 6966 + }, + { + "epoch": 0.2874886523066766, + "grad_norm": 2.7404020154964717, + "learning_rate": 2.508048447396876e-06, + "loss": 0.6137, + "step": 6967 + }, + { + "epoch": 0.2875299166460345, + "grad_norm": 11.195809519452373, + "learning_rate": 2.507899981534605e-06, + "loss": 0.5736, + "step": 6968 + }, + { + "epoch": 0.2875711809853924, + "grad_norm": 4.540478027930609, + "learning_rate": 2.507751497668718e-06, + "loss": 0.5644, + "step": 6969 + }, + { + "epoch": 0.28761244532475033, + "grad_norm": 7.7336318607811645, + "learning_rate": 2.5076029958018695e-06, + "loss": 0.5739, + "step": 6970 + }, + { + "epoch": 0.2876537096641083, + "grad_norm": 2.152110821085564, + "learning_rate": 2.5074544759367103e-06, + "loss": 0.5114, + "step": 6971 + }, + { + "epoch": 0.2876949740034662, + "grad_norm": 6.9760009967636085, + "learning_rate": 2.507305938075894e-06, + "loss": 0.5283, + "step": 6972 + }, + { + "epoch": 0.28773623834282414, + "grad_norm": 2.1125374137926034, + "learning_rate": 2.5071573822220735e-06, + "loss": 0.5671, + "step": 6973 + }, + { + "epoch": 0.28777750268218205, + "grad_norm": 20.649111505233492, + "learning_rate": 2.507008808377903e-06, + "loss": 0.5428, + "step": 6974 + }, + { + "epoch": 0.28781876702153997, + "grad_norm": 3.1155297668479203, + "learning_rate": 2.5068602165460357e-06, + "loss": 0.5722, + "step": 6975 + }, + { + "epoch": 0.28786003136089794, + "grad_norm": 3.2579072021158177, + 
"learning_rate": 2.5067116067291264e-06, + "loss": 0.5551, + "step": 6976 + }, + { + "epoch": 0.28790129570025585, + "grad_norm": 26.031985484135962, + "learning_rate": 2.506562978929829e-06, + "loss": 0.5309, + "step": 6977 + }, + { + "epoch": 0.28794256003961377, + "grad_norm": 2.8098510370659304, + "learning_rate": 2.5064143331507994e-06, + "loss": 0.5234, + "step": 6978 + }, + { + "epoch": 0.2879838243789717, + "grad_norm": 2.715453292026713, + "learning_rate": 2.5062656693946914e-06, + "loss": 0.5616, + "step": 6979 + }, + { + "epoch": 0.2880250887183296, + "grad_norm": 2.9943560120065893, + "learning_rate": 2.506116987664162e-06, + "loss": 0.5824, + "step": 6980 + }, + { + "epoch": 0.2880663530576876, + "grad_norm": 14.176291248397915, + "learning_rate": 2.5059682879618657e-06, + "loss": 0.5196, + "step": 6981 + }, + { + "epoch": 0.2881076173970455, + "grad_norm": 2.4772305401352424, + "learning_rate": 2.5058195702904593e-06, + "loss": 0.5063, + "step": 6982 + }, + { + "epoch": 0.2881488817364034, + "grad_norm": 34.01418753703428, + "learning_rate": 2.5056708346525998e-06, + "loss": 0.5862, + "step": 6983 + }, + { + "epoch": 0.2881901460757613, + "grad_norm": 2.323608585408341, + "learning_rate": 2.505522081050943e-06, + "loss": 0.576, + "step": 6984 + }, + { + "epoch": 0.28823141041511924, + "grad_norm": 2.9611725723669213, + "learning_rate": 2.5053733094881458e-06, + "loss": 0.5475, + "step": 6985 + }, + { + "epoch": 0.28827267475447715, + "grad_norm": 2.7096542286688075, + "learning_rate": 2.505224519966867e-06, + "loss": 0.4935, + "step": 6986 + }, + { + "epoch": 0.2883139390938351, + "grad_norm": 2.480346188005827, + "learning_rate": 2.5050757124897637e-06, + "loss": 0.5336, + "step": 6987 + }, + { + "epoch": 0.28835520343319304, + "grad_norm": 11.909372220909836, + "learning_rate": 2.5049268870594932e-06, + "loss": 0.5655, + "step": 6988 + }, + { + "epoch": 0.28839646777255096, + "grad_norm": 2.081267270958502, + "learning_rate": 2.504778043678715e-06, + "loss": 0.5556, + "step": 6989 + }, + { + "epoch": 0.2884377321119089, + "grad_norm": 3.734494746580625, + "learning_rate": 2.504629182350087e-06, + "loss": 0.5558, + "step": 6990 + }, + { + "epoch": 0.2884789964512668, + "grad_norm": 2.819865386720614, + "learning_rate": 2.504480303076269e-06, + "loss": 0.5539, + "step": 6991 + }, + { + "epoch": 0.28852026079062476, + "grad_norm": 2.997617073829542, + "learning_rate": 2.50433140585992e-06, + "loss": 0.5312, + "step": 6992 + }, + { + "epoch": 0.2885615251299827, + "grad_norm": 2.4053450626685255, + "learning_rate": 2.504182490703699e-06, + "loss": 0.5734, + "step": 6993 + }, + { + "epoch": 0.2886027894693406, + "grad_norm": 3.899027221563978, + "learning_rate": 2.5040335576102675e-06, + "loss": 0.5359, + "step": 6994 + }, + { + "epoch": 0.2886440538086985, + "grad_norm": 6.032446294000388, + "learning_rate": 2.503884606582284e-06, + "loss": 0.556, + "step": 6995 + }, + { + "epoch": 0.2886853181480564, + "grad_norm": 3.7708420112060077, + "learning_rate": 2.503735637622411e-06, + "loss": 0.4815, + "step": 6996 + }, + { + "epoch": 0.2887265824874144, + "grad_norm": 3.5616820136121428, + "learning_rate": 2.5035866507333083e-06, + "loss": 0.5189, + "step": 6997 + }, + { + "epoch": 0.2887678468267723, + "grad_norm": 4.494318585569203, + "learning_rate": 2.5034376459176375e-06, + "loss": 0.5237, + "step": 6998 + }, + { + "epoch": 0.28880911116613023, + "grad_norm": 14.589879279651639, + "learning_rate": 2.5032886231780596e-06, + "loss": 0.5313, + "step": 6999 + }, + { + "epoch": 
0.28885037550548814, + "grad_norm": 5.128919265653053, + "learning_rate": 2.5031395825172373e-06, + "loss": 0.5425, + "step": 7000 + }, + { + "epoch": 0.28889163984484606, + "grad_norm": 3.226467946854008, + "learning_rate": 2.502990523937833e-06, + "loss": 0.5437, + "step": 7001 + }, + { + "epoch": 0.28893290418420403, + "grad_norm": 2.8201829560469713, + "learning_rate": 2.5028414474425083e-06, + "loss": 0.5495, + "step": 7002 + }, + { + "epoch": 0.28897416852356195, + "grad_norm": 12.673154292425645, + "learning_rate": 2.5026923530339274e-06, + "loss": 0.5561, + "step": 7003 + }, + { + "epoch": 0.28901543286291986, + "grad_norm": 4.903315535248155, + "learning_rate": 2.5025432407147517e-06, + "loss": 0.531, + "step": 7004 + }, + { + "epoch": 0.2890566972022778, + "grad_norm": 3.0149331524349585, + "learning_rate": 2.5023941104876466e-06, + "loss": 0.4945, + "step": 7005 + }, + { + "epoch": 0.2890979615416357, + "grad_norm": 2.99005807490388, + "learning_rate": 2.5022449623552746e-06, + "loss": 0.5288, + "step": 7006 + }, + { + "epoch": 0.28913922588099367, + "grad_norm": 5.46710692715014, + "learning_rate": 2.5020957963203e-06, + "loss": 0.5297, + "step": 7007 + }, + { + "epoch": 0.2891804902203516, + "grad_norm": 2.6615084847520927, + "learning_rate": 2.5019466123853884e-06, + "loss": 0.565, + "step": 7008 + }, + { + "epoch": 0.2892217545597095, + "grad_norm": 2.905186599199736, + "learning_rate": 2.501797410553203e-06, + "loss": 0.5099, + "step": 7009 + }, + { + "epoch": 0.2892630188990674, + "grad_norm": 5.235900604791722, + "learning_rate": 2.5016481908264107e-06, + "loss": 0.5438, + "step": 7010 + }, + { + "epoch": 0.28930428323842533, + "grad_norm": 6.163226392962435, + "learning_rate": 2.5014989532076756e-06, + "loss": 0.5913, + "step": 7011 + }, + { + "epoch": 0.2893455475777833, + "grad_norm": 3.174055157879831, + "learning_rate": 2.5013496976996635e-06, + "loss": 0.5695, + "step": 7012 + }, + { + "epoch": 0.2893868119171412, + "grad_norm": 5.228754690916098, + "learning_rate": 2.501200424305041e-06, + "loss": 0.5328, + "step": 7013 + }, + { + "epoch": 0.28942807625649913, + "grad_norm": 3.9301061639761836, + "learning_rate": 2.501051133026474e-06, + "loss": 0.4718, + "step": 7014 + }, + { + "epoch": 0.28946934059585705, + "grad_norm": 2.1411921598383783, + "learning_rate": 2.5009018238666303e-06, + "loss": 0.5614, + "step": 7015 + }, + { + "epoch": 0.28951060493521497, + "grad_norm": 2.4036683300097286, + "learning_rate": 2.5007524968281758e-06, + "loss": 0.5106, + "step": 7016 + }, + { + "epoch": 0.28955186927457294, + "grad_norm": 5.079418288312653, + "learning_rate": 2.500603151913778e-06, + "loss": 0.5488, + "step": 7017 + }, + { + "epoch": 0.28959313361393085, + "grad_norm": 2.9414808427215116, + "learning_rate": 2.5004537891261046e-06, + "loss": 0.5917, + "step": 7018 + }, + { + "epoch": 0.28963439795328877, + "grad_norm": 3.8893508482650936, + "learning_rate": 2.500304408467824e-06, + "loss": 0.5391, + "step": 7019 + }, + { + "epoch": 0.2896756622926467, + "grad_norm": 3.1789157387807485, + "learning_rate": 2.5001550099416046e-06, + "loss": 0.5667, + "step": 7020 + }, + { + "epoch": 0.2897169266320046, + "grad_norm": 4.444130689418715, + "learning_rate": 2.500005593550114e-06, + "loss": 0.5458, + "step": 7021 + }, + { + "epoch": 0.2897581909713626, + "grad_norm": 10.922086970994542, + "learning_rate": 2.4998561592960226e-06, + "loss": 0.6218, + "step": 7022 + }, + { + "epoch": 0.2897994553107205, + "grad_norm": 4.408902392218604, + "learning_rate": 
2.499706707181999e-06, + "loss": 0.5755, + "step": 7023 + }, + { + "epoch": 0.2898407196500784, + "grad_norm": 2.4199400256801034, + "learning_rate": 2.499557237210712e-06, + "loss": 0.5392, + "step": 7024 + }, + { + "epoch": 0.2898819839894363, + "grad_norm": 8.547221593877346, + "learning_rate": 2.4994077493848327e-06, + "loss": 0.537, + "step": 7025 + }, + { + "epoch": 0.28992324832879424, + "grad_norm": 2.6552821099874895, + "learning_rate": 2.4992582437070305e-06, + "loss": 0.497, + "step": 7026 + }, + { + "epoch": 0.2899645126681522, + "grad_norm": 2.5566192621463055, + "learning_rate": 2.4991087201799765e-06, + "loss": 0.5059, + "step": 7027 + }, + { + "epoch": 0.2900057770075101, + "grad_norm": 5.453074821168341, + "learning_rate": 2.498959178806342e-06, + "loss": 0.5753, + "step": 7028 + }, + { + "epoch": 0.29004704134686804, + "grad_norm": 3.3126255523415504, + "learning_rate": 2.4988096195887967e-06, + "loss": 0.5245, + "step": 7029 + }, + { + "epoch": 0.29008830568622596, + "grad_norm": 2.890504615553731, + "learning_rate": 2.498660042530013e-06, + "loss": 0.5108, + "step": 7030 + }, + { + "epoch": 0.29012957002558387, + "grad_norm": 4.784085987026722, + "learning_rate": 2.4985104476326635e-06, + "loss": 0.5788, + "step": 7031 + }, + { + "epoch": 0.29017083436494184, + "grad_norm": 14.316410426188284, + "learning_rate": 2.4983608348994186e-06, + "loss": 0.5615, + "step": 7032 + }, + { + "epoch": 0.29021209870429976, + "grad_norm": 6.226789978437016, + "learning_rate": 2.498211204332952e-06, + "loss": 0.5753, + "step": 7033 + }, + { + "epoch": 0.2902533630436577, + "grad_norm": 4.426760031386365, + "learning_rate": 2.4980615559359365e-06, + "loss": 0.5426, + "step": 7034 + }, + { + "epoch": 0.2902946273830156, + "grad_norm": 13.47633365304632, + "learning_rate": 2.4979118897110444e-06, + "loss": 0.5864, + "step": 7035 + }, + { + "epoch": 0.2903358917223735, + "grad_norm": 5.4614151618891995, + "learning_rate": 2.4977622056609496e-06, + "loss": 0.5266, + "step": 7036 + }, + { + "epoch": 0.2903771560617315, + "grad_norm": 6.6316701976834596, + "learning_rate": 2.497612503788326e-06, + "loss": 0.5574, + "step": 7037 + }, + { + "epoch": 0.2904184204010894, + "grad_norm": 3.5001726013114274, + "learning_rate": 2.497462784095847e-06, + "loss": 0.5179, + "step": 7038 + }, + { + "epoch": 0.2904596847404473, + "grad_norm": 2.6121055039320655, + "learning_rate": 2.497313046586188e-06, + "loss": 0.549, + "step": 7039 + }, + { + "epoch": 0.2905009490798052, + "grad_norm": 2.528545823340896, + "learning_rate": 2.497163291262023e-06, + "loss": 0.595, + "step": 7040 + }, + { + "epoch": 0.29054221341916314, + "grad_norm": 5.47197138047381, + "learning_rate": 2.4970135181260267e-06, + "loss": 0.5228, + "step": 7041 + }, + { + "epoch": 0.2905834777585211, + "grad_norm": 2.81324851195535, + "learning_rate": 2.496863727180875e-06, + "loss": 0.5551, + "step": 7042 + }, + { + "epoch": 0.29062474209787903, + "grad_norm": 6.647355488425533, + "learning_rate": 2.4967139184292433e-06, + "loss": 0.5647, + "step": 7043 + }, + { + "epoch": 0.29066600643723695, + "grad_norm": 3.774217520447163, + "learning_rate": 2.496564091873808e-06, + "loss": 0.5203, + "step": 7044 + }, + { + "epoch": 0.29070727077659486, + "grad_norm": 2.795294246258938, + "learning_rate": 2.4964142475172445e-06, + "loss": 0.557, + "step": 7045 + }, + { + "epoch": 0.2907485351159528, + "grad_norm": 2.378083963908965, + "learning_rate": 2.49626438536223e-06, + "loss": 0.5416, + "step": 7046 + }, + { + "epoch": 0.2907897994553107, + 
"grad_norm": 3.712339928873397, + "learning_rate": 2.4961145054114417e-06, + "loss": 0.5403, + "step": 7047 + }, + { + "epoch": 0.29083106379466866, + "grad_norm": 2.6404101555737456, + "learning_rate": 2.495964607667556e-06, + "loss": 0.6029, + "step": 7048 + }, + { + "epoch": 0.2908723281340266, + "grad_norm": 2.766940495837291, + "learning_rate": 2.495814692133251e-06, + "loss": 0.5252, + "step": 7049 + }, + { + "epoch": 0.2909135924733845, + "grad_norm": 2.3150321670143734, + "learning_rate": 2.4956647588112047e-06, + "loss": 0.5009, + "step": 7050 + }, + { + "epoch": 0.2909548568127424, + "grad_norm": 6.734814016849625, + "learning_rate": 2.4955148077040946e-06, + "loss": 0.5192, + "step": 7051 + }, + { + "epoch": 0.29099612115210033, + "grad_norm": 2.4947920009698743, + "learning_rate": 2.4953648388145996e-06, + "loss": 0.55, + "step": 7052 + }, + { + "epoch": 0.2910373854914583, + "grad_norm": 3.684270026846091, + "learning_rate": 2.4952148521453986e-06, + "loss": 0.5662, + "step": 7053 + }, + { + "epoch": 0.2910786498308162, + "grad_norm": 7.663963430465852, + "learning_rate": 2.4950648476991708e-06, + "loss": 0.5431, + "step": 7054 + }, + { + "epoch": 0.29111991417017413, + "grad_norm": 1.8630091748690643, + "learning_rate": 2.494914825478596e-06, + "loss": 0.5063, + "step": 7055 + }, + { + "epoch": 0.29116117850953205, + "grad_norm": 10.463366696875804, + "learning_rate": 2.4947647854863535e-06, + "loss": 0.4621, + "step": 7056 + }, + { + "epoch": 0.29120244284888996, + "grad_norm": 4.266768477500153, + "learning_rate": 2.494614727725123e-06, + "loss": 0.53, + "step": 7057 + }, + { + "epoch": 0.29124370718824794, + "grad_norm": 31.043350412368383, + "learning_rate": 2.4944646521975854e-06, + "loss": 0.5021, + "step": 7058 + }, + { + "epoch": 0.29128497152760585, + "grad_norm": 3.8235251992697212, + "learning_rate": 2.494314558906421e-06, + "loss": 0.6034, + "step": 7059 + }, + { + "epoch": 0.29132623586696377, + "grad_norm": 3.350953665721617, + "learning_rate": 2.4941644478543116e-06, + "loss": 0.5363, + "step": 7060 + }, + { + "epoch": 0.2913675002063217, + "grad_norm": 87.08972152480546, + "learning_rate": 2.4940143190439383e-06, + "loss": 0.5761, + "step": 7061 + }, + { + "epoch": 0.2914087645456796, + "grad_norm": 3.25170346587942, + "learning_rate": 2.493864172477982e-06, + "loss": 0.5617, + "step": 7062 + }, + { + "epoch": 0.29145002888503757, + "grad_norm": 128.47012512785213, + "learning_rate": 2.4937140081591257e-06, + "loss": 0.5555, + "step": 7063 + }, + { + "epoch": 0.2914912932243955, + "grad_norm": 2.731088809058835, + "learning_rate": 2.493563826090052e-06, + "loss": 0.51, + "step": 7064 + }, + { + "epoch": 0.2915325575637534, + "grad_norm": 2.01191852076121, + "learning_rate": 2.4934136262734415e-06, + "loss": 0.5659, + "step": 7065 + }, + { + "epoch": 0.2915738219031113, + "grad_norm": 2.7371469466158116, + "learning_rate": 2.493263408711979e-06, + "loss": 0.4984, + "step": 7066 + }, + { + "epoch": 0.29161508624246923, + "grad_norm": 2.742304758745346, + "learning_rate": 2.4931131734083475e-06, + "loss": 0.5539, + "step": 7067 + }, + { + "epoch": 0.2916563505818272, + "grad_norm": 13.17142083502742, + "learning_rate": 2.4929629203652307e-06, + "loss": 0.5173, + "step": 7068 + }, + { + "epoch": 0.2916976149211851, + "grad_norm": 2.8627666855943077, + "learning_rate": 2.4928126495853113e-06, + "loss": 0.5358, + "step": 7069 + }, + { + "epoch": 0.29173887926054304, + "grad_norm": 3.615438039049405, + "learning_rate": 2.492662361071275e-06, + "loss": 0.5268, + 
"step": 7070 + }, + { + "epoch": 0.29178014359990095, + "grad_norm": 2.66810399876812, + "learning_rate": 2.4925120548258056e-06, + "loss": 0.5479, + "step": 7071 + }, + { + "epoch": 0.29182140793925887, + "grad_norm": 5.443614728093148, + "learning_rate": 2.4923617308515874e-06, + "loss": 0.5285, + "step": 7072 + }, + { + "epoch": 0.29186267227861684, + "grad_norm": 3.4652239043971473, + "learning_rate": 2.492211389151307e-06, + "loss": 0.5088, + "step": 7073 + }, + { + "epoch": 0.29190393661797476, + "grad_norm": 3.4498347071218904, + "learning_rate": 2.492061029727648e-06, + "loss": 0.5907, + "step": 7074 + }, + { + "epoch": 0.2919452009573327, + "grad_norm": 2.6617421312863048, + "learning_rate": 2.491910652583298e-06, + "loss": 0.5297, + "step": 7075 + }, + { + "epoch": 0.2919864652966906, + "grad_norm": 8.851657242373085, + "learning_rate": 2.4917602577209425e-06, + "loss": 0.544, + "step": 7076 + }, + { + "epoch": 0.2920277296360485, + "grad_norm": 18.185037054982466, + "learning_rate": 2.4916098451432676e-06, + "loss": 0.5981, + "step": 7077 + }, + { + "epoch": 0.2920689939754065, + "grad_norm": 2.704467729919341, + "learning_rate": 2.49145941485296e-06, + "loss": 0.5392, + "step": 7078 + }, + { + "epoch": 0.2921102583147644, + "grad_norm": 11.792723818884328, + "learning_rate": 2.491308966852707e-06, + "loss": 0.521, + "step": 7079 + }, + { + "epoch": 0.2921515226541223, + "grad_norm": 4.037409831541463, + "learning_rate": 2.4911585011451964e-06, + "loss": 0.5946, + "step": 7080 + }, + { + "epoch": 0.2921927869934802, + "grad_norm": 11.184053867084273, + "learning_rate": 2.491008017733115e-06, + "loss": 0.5569, + "step": 7081 + }, + { + "epoch": 0.29223405133283814, + "grad_norm": 3.4082453043263103, + "learning_rate": 2.490857516619152e-06, + "loss": 0.5239, + "step": 7082 + }, + { + "epoch": 0.2922753156721961, + "grad_norm": 3.832537098532091, + "learning_rate": 2.490706997805994e-06, + "loss": 0.5662, + "step": 7083 + }, + { + "epoch": 0.29231658001155403, + "grad_norm": 2.4565243620167543, + "learning_rate": 2.4905564612963318e-06, + "loss": 0.571, + "step": 7084 + }, + { + "epoch": 0.29235784435091194, + "grad_norm": 2.353259902797041, + "learning_rate": 2.4904059070928524e-06, + "loss": 0.5009, + "step": 7085 + }, + { + "epoch": 0.29239910869026986, + "grad_norm": 5.066030870044748, + "learning_rate": 2.490255335198246e-06, + "loss": 0.5527, + "step": 7086 + }, + { + "epoch": 0.2924403730296278, + "grad_norm": 3.756849369914702, + "learning_rate": 2.490104745615202e-06, + "loss": 0.4727, + "step": 7087 + }, + { + "epoch": 0.29248163736898575, + "grad_norm": 2.9600734304226135, + "learning_rate": 2.489954138346411e-06, + "loss": 0.5593, + "step": 7088 + }, + { + "epoch": 0.29252290170834366, + "grad_norm": 2.8223239570096483, + "learning_rate": 2.4898035133945617e-06, + "loss": 0.5701, + "step": 7089 + }, + { + "epoch": 0.2925641660477016, + "grad_norm": 2.6050154623882587, + "learning_rate": 2.489652870762346e-06, + "loss": 0.548, + "step": 7090 + }, + { + "epoch": 0.2926054303870595, + "grad_norm": 2.4945877334937134, + "learning_rate": 2.489502210452454e-06, + "loss": 0.555, + "step": 7091 + }, + { + "epoch": 0.2926466947264174, + "grad_norm": 2.24338169388316, + "learning_rate": 2.4893515324675776e-06, + "loss": 0.4743, + "step": 7092 + }, + { + "epoch": 0.2926879590657754, + "grad_norm": 2.8553193191766595, + "learning_rate": 2.489200836810408e-06, + "loss": 0.5434, + "step": 7093 + }, + { + "epoch": 0.2927292234051333, + "grad_norm": 4.09150447136664, + 
"learning_rate": 2.489050123483636e-06, + "loss": 0.5763, + "step": 7094 + }, + { + "epoch": 0.2927704877444912, + "grad_norm": 4.372837394942443, + "learning_rate": 2.4888993924899555e-06, + "loss": 0.5422, + "step": 7095 + }, + { + "epoch": 0.29281175208384913, + "grad_norm": 7.963436211389333, + "learning_rate": 2.4887486438320573e-06, + "loss": 0.5385, + "step": 7096 + }, + { + "epoch": 0.29285301642320705, + "grad_norm": 2.2102127080579064, + "learning_rate": 2.4885978775126354e-06, + "loss": 0.5328, + "step": 7097 + }, + { + "epoch": 0.292894280762565, + "grad_norm": 2.573654437535538, + "learning_rate": 2.488447093534382e-06, + "loss": 0.5359, + "step": 7098 + }, + { + "epoch": 0.29293554510192293, + "grad_norm": 4.295642256474564, + "learning_rate": 2.488296291899991e-06, + "loss": 0.5556, + "step": 7099 + }, + { + "epoch": 0.29297680944128085, + "grad_norm": 2.505611655396881, + "learning_rate": 2.4881454726121555e-06, + "loss": 0.6188, + "step": 7100 + }, + { + "epoch": 0.29301807378063877, + "grad_norm": 2.3659495976446263, + "learning_rate": 2.4879946356735706e-06, + "loss": 0.5216, + "step": 7101 + }, + { + "epoch": 0.2930593381199967, + "grad_norm": 3.647868019561457, + "learning_rate": 2.4878437810869293e-06, + "loss": 0.5467, + "step": 7102 + }, + { + "epoch": 0.29310060245935465, + "grad_norm": 5.067053835682175, + "learning_rate": 2.487692908854927e-06, + "loss": 0.5804, + "step": 7103 + }, + { + "epoch": 0.29314186679871257, + "grad_norm": 5.187247822653707, + "learning_rate": 2.4875420189802586e-06, + "loss": 0.5583, + "step": 7104 + }, + { + "epoch": 0.2931831311380705, + "grad_norm": 2.744892193926102, + "learning_rate": 2.4873911114656192e-06, + "loss": 0.5512, + "step": 7105 + }, + { + "epoch": 0.2932243954774284, + "grad_norm": 3.8739217854879984, + "learning_rate": 2.4872401863137045e-06, + "loss": 0.5132, + "step": 7106 + }, + { + "epoch": 0.2932656598167863, + "grad_norm": 3.2521299132315775, + "learning_rate": 2.4870892435272106e-06, + "loss": 0.556, + "step": 7107 + }, + { + "epoch": 0.29330692415614423, + "grad_norm": 2.4971450774921293, + "learning_rate": 2.4869382831088335e-06, + "loss": 0.5377, + "step": 7108 + }, + { + "epoch": 0.2933481884955022, + "grad_norm": 3.4666939835599435, + "learning_rate": 2.486787305061269e-06, + "loss": 0.534, + "step": 7109 + }, + { + "epoch": 0.2933894528348601, + "grad_norm": 3.738222278356452, + "learning_rate": 2.4866363093872155e-06, + "loss": 0.5674, + "step": 7110 + }, + { + "epoch": 0.29343071717421804, + "grad_norm": 3.6821783205204612, + "learning_rate": 2.486485296089369e-06, + "loss": 0.5411, + "step": 7111 + }, + { + "epoch": 0.29347198151357595, + "grad_norm": 3.208682153629872, + "learning_rate": 2.486334265170427e-06, + "loss": 0.548, + "step": 7112 + }, + { + "epoch": 0.29351324585293387, + "grad_norm": 1.8138118931187581, + "learning_rate": 2.486183216633088e-06, + "loss": 0.4848, + "step": 7113 + }, + { + "epoch": 0.29355451019229184, + "grad_norm": 3.074080639428051, + "learning_rate": 2.4860321504800495e-06, + "loss": 0.578, + "step": 7114 + }, + { + "epoch": 0.29359577453164976, + "grad_norm": 2.4268964443532313, + "learning_rate": 2.4858810667140106e-06, + "loss": 0.5136, + "step": 7115 + }, + { + "epoch": 0.29363703887100767, + "grad_norm": 2.1810765752334484, + "learning_rate": 2.485729965337669e-06, + "loss": 0.5673, + "step": 7116 + }, + { + "epoch": 0.2936783032103656, + "grad_norm": 6.747600580455317, + "learning_rate": 2.4855788463537244e-06, + "loss": 0.5407, + "step": 7117 + }, + { + "epoch": 
0.2937195675497235, + "grad_norm": 2.511185754036146, + "learning_rate": 2.4854277097648762e-06, + "loss": 0.5273, + "step": 7118 + }, + { + "epoch": 0.2937608318890815, + "grad_norm": 3.972972609415001, + "learning_rate": 2.4852765555738237e-06, + "loss": 0.6011, + "step": 7119 + }, + { + "epoch": 0.2938020962284394, + "grad_norm": 2.211426123383428, + "learning_rate": 2.4851253837832675e-06, + "loss": 0.5333, + "step": 7120 + }, + { + "epoch": 0.2938433605677973, + "grad_norm": 2.741054657393104, + "learning_rate": 2.484974194395907e-06, + "loss": 0.5879, + "step": 7121 + }, + { + "epoch": 0.2938846249071552, + "grad_norm": 2.876551506658813, + "learning_rate": 2.4848229874144443e-06, + "loss": 0.5097, + "step": 7122 + }, + { + "epoch": 0.29392588924651314, + "grad_norm": 2.501661922372093, + "learning_rate": 2.484671762841579e-06, + "loss": 0.536, + "step": 7123 + }, + { + "epoch": 0.2939671535858711, + "grad_norm": 3.6483135684382044, + "learning_rate": 2.484520520680012e-06, + "loss": 0.4994, + "step": 7124 + }, + { + "epoch": 0.294008417925229, + "grad_norm": 13.062633428359955, + "learning_rate": 2.4843692609324462e-06, + "loss": 0.5425, + "step": 7125 + }, + { + "epoch": 0.29404968226458694, + "grad_norm": 3.5899406513736567, + "learning_rate": 2.4842179836015827e-06, + "loss": 0.524, + "step": 7126 + }, + { + "epoch": 0.29409094660394486, + "grad_norm": 8.519850740113993, + "learning_rate": 2.4840666886901243e-06, + "loss": 0.5377, + "step": 7127 + }, + { + "epoch": 0.2941322109433028, + "grad_norm": 9.014355574637483, + "learning_rate": 2.483915376200773e-06, + "loss": 0.5154, + "step": 7128 + }, + { + "epoch": 0.29417347528266075, + "grad_norm": 5.669876226944335, + "learning_rate": 2.4837640461362313e-06, + "loss": 0.5065, + "step": 7129 + }, + { + "epoch": 0.29421473962201866, + "grad_norm": 3.1367504586516444, + "learning_rate": 2.4836126984992026e-06, + "loss": 0.4632, + "step": 7130 + }, + { + "epoch": 0.2942560039613766, + "grad_norm": 6.87154490257487, + "learning_rate": 2.4834613332923914e-06, + "loss": 0.5474, + "step": 7131 + }, + { + "epoch": 0.2942972683007345, + "grad_norm": 6.239956928144196, + "learning_rate": 2.4833099505184997e-06, + "loss": 0.486, + "step": 7132 + }, + { + "epoch": 0.2943385326400924, + "grad_norm": 2.5223592019443304, + "learning_rate": 2.4831585501802325e-06, + "loss": 0.5255, + "step": 7133 + }, + { + "epoch": 0.2943797969794504, + "grad_norm": 6.8944930873470085, + "learning_rate": 2.483007132280295e-06, + "loss": 0.5924, + "step": 7134 + }, + { + "epoch": 0.2944210613188083, + "grad_norm": 8.662215607890142, + "learning_rate": 2.48285569682139e-06, + "loss": 0.5998, + "step": 7135 + }, + { + "epoch": 0.2944623256581662, + "grad_norm": 3.345113349249429, + "learning_rate": 2.4827042438062235e-06, + "loss": 0.6099, + "step": 7136 + }, + { + "epoch": 0.29450358999752413, + "grad_norm": 6.330985780741544, + "learning_rate": 2.482552773237501e-06, + "loss": 0.5326, + "step": 7137 + }, + { + "epoch": 0.29454485433688204, + "grad_norm": 4.791184270135699, + "learning_rate": 2.4824012851179286e-06, + "loss": 0.4929, + "step": 7138 + }, + { + "epoch": 0.29458611867624, + "grad_norm": 3.514324509291512, + "learning_rate": 2.482249779450211e-06, + "loss": 0.5843, + "step": 7139 + }, + { + "epoch": 0.29462738301559793, + "grad_norm": 5.337065560135965, + "learning_rate": 2.4820982562370552e-06, + "loss": 0.7037, + "step": 7140 + }, + { + "epoch": 0.29466864735495585, + "grad_norm": 6.9194202427040175, + "learning_rate": 2.481946715481168e-06, + 
"loss": 0.5204, + "step": 7141 + }, + { + "epoch": 0.29470991169431376, + "grad_norm": 3.6878894915088125, + "learning_rate": 2.481795157185256e-06, + "loss": 0.5413, + "step": 7142 + }, + { + "epoch": 0.2947511760336717, + "grad_norm": 4.383470014668349, + "learning_rate": 2.4816435813520266e-06, + "loss": 0.5158, + "step": 7143 + }, + { + "epoch": 0.29479244037302965, + "grad_norm": 4.748841715715173, + "learning_rate": 2.4814919879841864e-06, + "loss": 0.5254, + "step": 7144 + }, + { + "epoch": 0.29483370471238757, + "grad_norm": 2.5224912958538184, + "learning_rate": 2.4813403770844446e-06, + "loss": 0.5583, + "step": 7145 + }, + { + "epoch": 0.2948749690517455, + "grad_norm": 3.1364560960881533, + "learning_rate": 2.4811887486555085e-06, + "loss": 0.5276, + "step": 7146 + }, + { + "epoch": 0.2949162333911034, + "grad_norm": 2.4504715986239565, + "learning_rate": 2.481037102700087e-06, + "loss": 0.532, + "step": 7147 + }, + { + "epoch": 0.2949574977304613, + "grad_norm": 2.452189474286874, + "learning_rate": 2.4808854392208885e-06, + "loss": 0.6405, + "step": 7148 + }, + { + "epoch": 0.2949987620698193, + "grad_norm": 6.590625206624608, + "learning_rate": 2.480733758220622e-06, + "loss": 0.5921, + "step": 7149 + }, + { + "epoch": 0.2950400264091772, + "grad_norm": 2.430828305558559, + "learning_rate": 2.4805820597019975e-06, + "loss": 0.5252, + "step": 7150 + }, + { + "epoch": 0.2950812907485351, + "grad_norm": 4.962736424170447, + "learning_rate": 2.4804303436677246e-06, + "loss": 0.5566, + "step": 7151 + }, + { + "epoch": 0.29512255508789303, + "grad_norm": 4.526286830086906, + "learning_rate": 2.4802786101205124e-06, + "loss": 0.5107, + "step": 7152 + }, + { + "epoch": 0.29516381942725095, + "grad_norm": 9.403868815035818, + "learning_rate": 2.4801268590630725e-06, + "loss": 0.5832, + "step": 7153 + }, + { + "epoch": 0.2952050837666089, + "grad_norm": 2.548039744681703, + "learning_rate": 2.4799750904981147e-06, + "loss": 0.5652, + "step": 7154 + }, + { + "epoch": 0.29524634810596684, + "grad_norm": 2.262785923186905, + "learning_rate": 2.4798233044283503e-06, + "loss": 0.4692, + "step": 7155 + }, + { + "epoch": 0.29528761244532475, + "grad_norm": 6.563752857204633, + "learning_rate": 2.479671500856491e-06, + "loss": 0.5127, + "step": 7156 + }, + { + "epoch": 0.29532887678468267, + "grad_norm": 3.0836556269272037, + "learning_rate": 2.4795196797852475e-06, + "loss": 0.5307, + "step": 7157 + }, + { + "epoch": 0.2953701411240406, + "grad_norm": 3.0389655901522903, + "learning_rate": 2.4793678412173318e-06, + "loss": 0.5276, + "step": 7158 + }, + { + "epoch": 0.29541140546339856, + "grad_norm": 3.5958138215072015, + "learning_rate": 2.479215985155457e-06, + "loss": 0.505, + "step": 7159 + }, + { + "epoch": 0.2954526698027565, + "grad_norm": 2.189794540294904, + "learning_rate": 2.4790641116023353e-06, + "loss": 0.5762, + "step": 7160 + }, + { + "epoch": 0.2954939341421144, + "grad_norm": 4.2853219898795585, + "learning_rate": 2.478912220560679e-06, + "loss": 0.5462, + "step": 7161 + }, + { + "epoch": 0.2955351984814723, + "grad_norm": 3.994218063035985, + "learning_rate": 2.478760312033201e-06, + "loss": 0.5625, + "step": 7162 + }, + { + "epoch": 0.2955764628208302, + "grad_norm": 3.7099759252359754, + "learning_rate": 2.478608386022616e-06, + "loss": 0.5134, + "step": 7163 + }, + { + "epoch": 0.2956177271601882, + "grad_norm": 9.269215345293793, + "learning_rate": 2.4784564425316375e-06, + "loss": 0.5471, + "step": 7164 + }, + { + "epoch": 0.2956589914995461, + "grad_norm": 
2.9873762558305925, + "learning_rate": 2.4783044815629784e-06, + "loss": 0.55, + "step": 7165 + }, + { + "epoch": 0.295700255838904, + "grad_norm": 4.368252049299063, + "learning_rate": 2.4781525031193546e-06, + "loss": 0.5736, + "step": 7166 + }, + { + "epoch": 0.29574152017826194, + "grad_norm": 2.4157340112964887, + "learning_rate": 2.4780005072034794e-06, + "loss": 0.5168, + "step": 7167 + }, + { + "epoch": 0.29578278451761986, + "grad_norm": 2.2633263024894945, + "learning_rate": 2.477848493818069e-06, + "loss": 0.5433, + "step": 7168 + }, + { + "epoch": 0.2958240488569778, + "grad_norm": 8.671852209481933, + "learning_rate": 2.477696462965838e-06, + "loss": 0.6142, + "step": 7169 + }, + { + "epoch": 0.29586531319633574, + "grad_norm": 6.498530837178392, + "learning_rate": 2.4775444146495027e-06, + "loss": 0.5662, + "step": 7170 + }, + { + "epoch": 0.29590657753569366, + "grad_norm": 2.830483465079488, + "learning_rate": 2.4773923488717786e-06, + "loss": 0.5744, + "step": 7171 + }, + { + "epoch": 0.2959478418750516, + "grad_norm": 2.359468353861965, + "learning_rate": 2.477240265635382e-06, + "loss": 0.5248, + "step": 7172 + }, + { + "epoch": 0.2959891062144095, + "grad_norm": 6.143674946492787, + "learning_rate": 2.4770881649430295e-06, + "loss": 0.5141, + "step": 7173 + }, + { + "epoch": 0.2960303705537674, + "grad_norm": 2.521741974037966, + "learning_rate": 2.476936046797439e-06, + "loss": 0.5739, + "step": 7174 + }, + { + "epoch": 0.2960716348931254, + "grad_norm": 2.2640716535923215, + "learning_rate": 2.4767839112013253e-06, + "loss": 0.5805, + "step": 7175 + }, + { + "epoch": 0.2961128992324833, + "grad_norm": 3.440418826733011, + "learning_rate": 2.4766317581574083e-06, + "loss": 0.5383, + "step": 7176 + }, + { + "epoch": 0.2961541635718412, + "grad_norm": 5.8276087754863655, + "learning_rate": 2.476479587668405e-06, + "loss": 0.5903, + "step": 7177 + }, + { + "epoch": 0.2961954279111991, + "grad_norm": 6.9292274690778575, + "learning_rate": 2.476327399737033e-06, + "loss": 0.5813, + "step": 7178 + }, + { + "epoch": 0.29623669225055704, + "grad_norm": 1.8352274247120504, + "learning_rate": 2.476175194366011e-06, + "loss": 0.5046, + "step": 7179 + }, + { + "epoch": 0.296277956589915, + "grad_norm": 2.8796749385472316, + "learning_rate": 2.476022971558059e-06, + "loss": 0.5165, + "step": 7180 + }, + { + "epoch": 0.29631922092927293, + "grad_norm": 7.600549499538564, + "learning_rate": 2.4758707313158943e-06, + "loss": 0.6148, + "step": 7181 + }, + { + "epoch": 0.29636048526863085, + "grad_norm": 2.9379749597810147, + "learning_rate": 2.4757184736422376e-06, + "loss": 0.5156, + "step": 7182 + }, + { + "epoch": 0.29640174960798876, + "grad_norm": 2.3372920648921087, + "learning_rate": 2.475566198539808e-06, + "loss": 0.5416, + "step": 7183 + }, + { + "epoch": 0.2964430139473467, + "grad_norm": 2.4683705716637188, + "learning_rate": 2.475413906011325e-06, + "loss": 0.5427, + "step": 7184 + }, + { + "epoch": 0.29648427828670465, + "grad_norm": 7.3845255322346235, + "learning_rate": 2.4752615960595103e-06, + "loss": 0.5219, + "step": 7185 + }, + { + "epoch": 0.29652554262606257, + "grad_norm": 2.611696187129561, + "learning_rate": 2.475109268687083e-06, + "loss": 0.51, + "step": 7186 + }, + { + "epoch": 0.2965668069654205, + "grad_norm": 3.9403387388786655, + "learning_rate": 2.4749569238967658e-06, + "loss": 0.5757, + "step": 7187 + }, + { + "epoch": 0.2966080713047784, + "grad_norm": 3.414570667853467, + "learning_rate": 2.4748045616912783e-06, + "loss": 0.5317, + "step": 7188 + 
}, + { + "epoch": 0.2966493356441363, + "grad_norm": 3.066555826008991, + "learning_rate": 2.474652182073343e-06, + "loss": 0.5144, + "step": 7189 + }, + { + "epoch": 0.2966905999834943, + "grad_norm": 3.176817829388575, + "learning_rate": 2.4744997850456815e-06, + "loss": 0.5786, + "step": 7190 + }, + { + "epoch": 0.2967318643228522, + "grad_norm": 3.957100009291682, + "learning_rate": 2.474347370611016e-06, + "loss": 0.5515, + "step": 7191 + }, + { + "epoch": 0.2967731286622101, + "grad_norm": 4.489117423049406, + "learning_rate": 2.4741949387720684e-06, + "loss": 0.5208, + "step": 7192 + }, + { + "epoch": 0.29681439300156803, + "grad_norm": 6.453602826004291, + "learning_rate": 2.4740424895315634e-06, + "loss": 0.5427, + "step": 7193 + }, + { + "epoch": 0.29685565734092595, + "grad_norm": 5.555441619419996, + "learning_rate": 2.473890022892222e-06, + "loss": 0.588, + "step": 7194 + }, + { + "epoch": 0.2968969216802839, + "grad_norm": 2.6413317573095036, + "learning_rate": 2.4737375388567685e-06, + "loss": 0.5516, + "step": 7195 + }, + { + "epoch": 0.29693818601964184, + "grad_norm": 3.2874773631146943, + "learning_rate": 2.4735850374279263e-06, + "loss": 0.5647, + "step": 7196 + }, + { + "epoch": 0.29697945035899975, + "grad_norm": 7.659799281795483, + "learning_rate": 2.4734325186084203e-06, + "loss": 0.5833, + "step": 7197 + }, + { + "epoch": 0.29702071469835767, + "grad_norm": 3.946258023686155, + "learning_rate": 2.4732799824009745e-06, + "loss": 0.5357, + "step": 7198 + }, + { + "epoch": 0.2970619790377156, + "grad_norm": 5.841240821628526, + "learning_rate": 2.473127428808313e-06, + "loss": 0.5496, + "step": 7199 + }, + { + "epoch": 0.29710324337707356, + "grad_norm": 2.610428626720819, + "learning_rate": 2.4729748578331615e-06, + "loss": 0.5212, + "step": 7200 + }, + { + "epoch": 0.29714450771643147, + "grad_norm": 4.133367348080668, + "learning_rate": 2.472822269478245e-06, + "loss": 0.5184, + "step": 7201 + }, + { + "epoch": 0.2971857720557894, + "grad_norm": 6.348580858313829, + "learning_rate": 2.472669663746289e-06, + "loss": 0.4899, + "step": 7202 + }, + { + "epoch": 0.2972270363951473, + "grad_norm": 3.159215635589698, + "learning_rate": 2.47251704064002e-06, + "loss": 0.5511, + "step": 7203 + }, + { + "epoch": 0.2972683007345052, + "grad_norm": 3.3922008330659255, + "learning_rate": 2.472364400162163e-06, + "loss": 0.5425, + "step": 7204 + }, + { + "epoch": 0.2973095650738632, + "grad_norm": 5.557131624669637, + "learning_rate": 2.4722117423154463e-06, + "loss": 0.5409, + "step": 7205 + }, + { + "epoch": 0.2973508294132211, + "grad_norm": 2.874985064628373, + "learning_rate": 2.4720590671025955e-06, + "loss": 0.6095, + "step": 7206 + }, + { + "epoch": 0.297392093752579, + "grad_norm": 2.2642575337419832, + "learning_rate": 2.471906374526338e-06, + "loss": 0.5381, + "step": 7207 + }, + { + "epoch": 0.29743335809193694, + "grad_norm": 7.83275046809437, + "learning_rate": 2.4717536645894005e-06, + "loss": 0.5156, + "step": 7208 + }, + { + "epoch": 0.29747462243129486, + "grad_norm": 2.0029713712551365, + "learning_rate": 2.471600937294512e-06, + "loss": 0.5498, + "step": 7209 + }, + { + "epoch": 0.2975158867706528, + "grad_norm": 2.7821685546513457, + "learning_rate": 2.4714481926444007e-06, + "loss": 0.552, + "step": 7210 + }, + { + "epoch": 0.29755715111001074, + "grad_norm": 4.621565979783032, + "learning_rate": 2.4712954306417944e-06, + "loss": 0.471, + "step": 7211 + }, + { + "epoch": 0.29759841544936866, + "grad_norm": 5.368539874119123, + "learning_rate": 
2.4711426512894216e-06, + "loss": 0.5936, + "step": 7212 + }, + { + "epoch": 0.2976396797887266, + "grad_norm": 3.392333854770754, + "learning_rate": 2.4709898545900118e-06, + "loss": 0.541, + "step": 7213 + }, + { + "epoch": 0.2976809441280845, + "grad_norm": 2.3303571442798052, + "learning_rate": 2.470837040546294e-06, + "loss": 0.5229, + "step": 7214 + }, + { + "epoch": 0.29772220846744246, + "grad_norm": 2.6664929834402047, + "learning_rate": 2.4706842091609982e-06, + "loss": 0.6057, + "step": 7215 + }, + { + "epoch": 0.2977634728068004, + "grad_norm": 2.3149106806438997, + "learning_rate": 2.470531360436854e-06, + "loss": 0.5847, + "step": 7216 + }, + { + "epoch": 0.2978047371461583, + "grad_norm": 2.6134243810209927, + "learning_rate": 2.4703784943765915e-06, + "loss": 0.5662, + "step": 7217 + }, + { + "epoch": 0.2978460014855162, + "grad_norm": 6.545837741031439, + "learning_rate": 2.4702256109829424e-06, + "loss": 0.555, + "step": 7218 + }, + { + "epoch": 0.2978872658248741, + "grad_norm": 3.1385248675594095, + "learning_rate": 2.470072710258636e-06, + "loss": 0.569, + "step": 7219 + }, + { + "epoch": 0.2979285301642321, + "grad_norm": 2.4530493574082506, + "learning_rate": 2.469919792206405e-06, + "loss": 0.5685, + "step": 7220 + }, + { + "epoch": 0.29796979450359, + "grad_norm": 3.701314670025045, + "learning_rate": 2.4697668568289794e-06, + "loss": 0.5184, + "step": 7221 + }, + { + "epoch": 0.29801105884294793, + "grad_norm": 1.8755672271877921, + "learning_rate": 2.469613904129092e-06, + "loss": 0.4975, + "step": 7222 + }, + { + "epoch": 0.29805232318230584, + "grad_norm": 5.387865735868908, + "learning_rate": 2.4694609341094745e-06, + "loss": 0.5506, + "step": 7223 + }, + { + "epoch": 0.29809358752166376, + "grad_norm": 3.415962130056051, + "learning_rate": 2.4693079467728596e-06, + "loss": 0.5821, + "step": 7224 + }, + { + "epoch": 0.29813485186102173, + "grad_norm": 13.893336801304105, + "learning_rate": 2.46915494212198e-06, + "loss": 0.5025, + "step": 7225 + }, + { + "epoch": 0.29817611620037965, + "grad_norm": 2.821630949993926, + "learning_rate": 2.4690019201595693e-06, + "loss": 0.5259, + "step": 7226 + }, + { + "epoch": 0.29821738053973756, + "grad_norm": 17.666649949791385, + "learning_rate": 2.4688488808883595e-06, + "loss": 0.57, + "step": 7227 + }, + { + "epoch": 0.2982586448790955, + "grad_norm": 3.3954604098793544, + "learning_rate": 2.468695824311085e-06, + "loss": 0.607, + "step": 7228 + }, + { + "epoch": 0.2982999092184534, + "grad_norm": 3.7320860456941243, + "learning_rate": 2.4685427504304795e-06, + "loss": 0.5509, + "step": 7229 + }, + { + "epoch": 0.2983411735578113, + "grad_norm": 3.5289765902314616, + "learning_rate": 2.4683896592492782e-06, + "loss": 0.6174, + "step": 7230 + }, + { + "epoch": 0.2983824378971693, + "grad_norm": 3.033788882591399, + "learning_rate": 2.468236550770215e-06, + "loss": 0.5274, + "step": 7231 + }, + { + "epoch": 0.2984237022365272, + "grad_norm": 5.390000698188575, + "learning_rate": 2.4680834249960245e-06, + "loss": 0.5512, + "step": 7232 + }, + { + "epoch": 0.2984649665758851, + "grad_norm": 2.5427442449106317, + "learning_rate": 2.4679302819294426e-06, + "loss": 0.5558, + "step": 7233 + }, + { + "epoch": 0.29850623091524303, + "grad_norm": 3.3807583921646724, + "learning_rate": 2.4677771215732044e-06, + "loss": 0.5001, + "step": 7234 + }, + { + "epoch": 0.29854749525460095, + "grad_norm": 17.08286993513873, + "learning_rate": 2.4676239439300456e-06, + "loss": 0.568, + "step": 7235 + }, + { + "epoch": 0.2985887595939589, + 
"grad_norm": 3.302661778803789, + "learning_rate": 2.4674707490027022e-06, + "loss": 0.5498, + "step": 7236 + }, + { + "epoch": 0.29863002393331683, + "grad_norm": 4.619318352694362, + "learning_rate": 2.4673175367939114e-06, + "loss": 0.5956, + "step": 7237 + }, + { + "epoch": 0.29867128827267475, + "grad_norm": 2.6832025132385775, + "learning_rate": 2.4671643073064096e-06, + "loss": 0.5424, + "step": 7238 + }, + { + "epoch": 0.29871255261203267, + "grad_norm": 4.148319266056858, + "learning_rate": 2.467011060542934e-06, + "loss": 0.5123, + "step": 7239 + }, + { + "epoch": 0.2987538169513906, + "grad_norm": 4.0426022119822695, + "learning_rate": 2.4668577965062215e-06, + "loss": 0.5259, + "step": 7240 + }, + { + "epoch": 0.29879508129074855, + "grad_norm": 4.100138539204199, + "learning_rate": 2.46670451519901e-06, + "loss": 0.5714, + "step": 7241 + }, + { + "epoch": 0.29883634563010647, + "grad_norm": 3.702692562408387, + "learning_rate": 2.4665512166240374e-06, + "loss": 0.5643, + "step": 7242 + }, + { + "epoch": 0.2988776099694644, + "grad_norm": 2.8732441303249603, + "learning_rate": 2.4663979007840423e-06, + "loss": 0.5734, + "step": 7243 + }, + { + "epoch": 0.2989188743088223, + "grad_norm": 9.350821543183063, + "learning_rate": 2.4662445676817635e-06, + "loss": 0.5189, + "step": 7244 + }, + { + "epoch": 0.2989601386481802, + "grad_norm": 2.095603852845555, + "learning_rate": 2.4660912173199385e-06, + "loss": 0.5192, + "step": 7245 + }, + { + "epoch": 0.2990014029875382, + "grad_norm": 2.6654408927250293, + "learning_rate": 2.465937849701309e-06, + "loss": 0.5499, + "step": 7246 + }, + { + "epoch": 0.2990426673268961, + "grad_norm": 3.523920742073265, + "learning_rate": 2.465784464828612e-06, + "loss": 0.5243, + "step": 7247 + }, + { + "epoch": 0.299083931666254, + "grad_norm": 3.4562935687619945, + "learning_rate": 2.4656310627045887e-06, + "loss": 0.5576, + "step": 7248 + }, + { + "epoch": 0.29912519600561194, + "grad_norm": 2.443447703971294, + "learning_rate": 2.4654776433319787e-06, + "loss": 0.4967, + "step": 7249 + }, + { + "epoch": 0.29916646034496985, + "grad_norm": 4.823950243351283, + "learning_rate": 2.4653242067135235e-06, + "loss": 0.5439, + "step": 7250 + }, + { + "epoch": 0.2992077246843278, + "grad_norm": 2.5354580966598927, + "learning_rate": 2.4651707528519626e-06, + "loss": 0.5385, + "step": 7251 + }, + { + "epoch": 0.29924898902368574, + "grad_norm": 3.4071787314980835, + "learning_rate": 2.4650172817500375e-06, + "loss": 0.5743, + "step": 7252 + }, + { + "epoch": 0.29929025336304366, + "grad_norm": 2.8359399985336435, + "learning_rate": 2.4648637934104897e-06, + "loss": 0.5833, + "step": 7253 + }, + { + "epoch": 0.2993315177024016, + "grad_norm": 4.042498993674874, + "learning_rate": 2.4647102878360614e-06, + "loss": 0.5735, + "step": 7254 + }, + { + "epoch": 0.2993727820417595, + "grad_norm": 3.4453548226901094, + "learning_rate": 2.4645567650294935e-06, + "loss": 0.5373, + "step": 7255 + }, + { + "epoch": 0.29941404638111746, + "grad_norm": 10.888114174527473, + "learning_rate": 2.4644032249935285e-06, + "loss": 0.5818, + "step": 7256 + }, + { + "epoch": 0.2994553107204754, + "grad_norm": 177.4953484328724, + "learning_rate": 2.4642496677309097e-06, + "loss": 0.5367, + "step": 7257 + }, + { + "epoch": 0.2994965750598333, + "grad_norm": 3.353874370067619, + "learning_rate": 2.46409609324438e-06, + "loss": 0.5149, + "step": 7258 + }, + { + "epoch": 0.2995378393991912, + "grad_norm": 48.136984706493514, + "learning_rate": 2.463942501536682e-06, + "loss": 0.5166, 
+ "step": 7259 + }, + { + "epoch": 0.2995791037385491, + "grad_norm": 6.335499713655374, + "learning_rate": 2.4637888926105595e-06, + "loss": 0.5584, + "step": 7260 + }, + { + "epoch": 0.2996203680779071, + "grad_norm": 7.083636203118324, + "learning_rate": 2.4636352664687563e-06, + "loss": 0.5512, + "step": 7261 + }, + { + "epoch": 0.299661632417265, + "grad_norm": 2.960830509858028, + "learning_rate": 2.4634816231140163e-06, + "loss": 0.5268, + "step": 7262 + }, + { + "epoch": 0.2997028967566229, + "grad_norm": 4.143452272865045, + "learning_rate": 2.4633279625490846e-06, + "loss": 0.5485, + "step": 7263 + }, + { + "epoch": 0.29974416109598084, + "grad_norm": 3.2970224835310065, + "learning_rate": 2.463174284776706e-06, + "loss": 0.5327, + "step": 7264 + }, + { + "epoch": 0.29978542543533876, + "grad_norm": 6.048746500078924, + "learning_rate": 2.4630205897996245e-06, + "loss": 0.5245, + "step": 7265 + }, + { + "epoch": 0.29982668977469673, + "grad_norm": 3.791891366268982, + "learning_rate": 2.4628668776205865e-06, + "loss": 0.539, + "step": 7266 + }, + { + "epoch": 0.29986795411405465, + "grad_norm": 7.732414474134504, + "learning_rate": 2.462713148242337e-06, + "loss": 0.5323, + "step": 7267 + }, + { + "epoch": 0.29990921845341256, + "grad_norm": 1.9580134827302726, + "learning_rate": 2.462559401667623e-06, + "loss": 0.4931, + "step": 7268 + }, + { + "epoch": 0.2999504827927705, + "grad_norm": 3.778740937664322, + "learning_rate": 2.4624056378991894e-06, + "loss": 0.5984, + "step": 7269 + }, + { + "epoch": 0.2999917471321284, + "grad_norm": 5.485871682845476, + "learning_rate": 2.4622518569397835e-06, + "loss": 0.4757, + "step": 7270 + }, + { + "epoch": 0.30003301147148637, + "grad_norm": 8.529852318097271, + "learning_rate": 2.462098058792153e-06, + "loss": 0.5578, + "step": 7271 + }, + { + "epoch": 0.3000742758108443, + "grad_norm": 2.2363995799418013, + "learning_rate": 2.4619442434590438e-06, + "loss": 0.4555, + "step": 7272 + }, + { + "epoch": 0.3001155401502022, + "grad_norm": 1.855783761133706, + "learning_rate": 2.4617904109432042e-06, + "loss": 0.5119, + "step": 7273 + }, + { + "epoch": 0.3001568044895601, + "grad_norm": 12.021082602175358, + "learning_rate": 2.4616365612473817e-06, + "loss": 0.5674, + "step": 7274 + }, + { + "epoch": 0.30019806882891803, + "grad_norm": 5.3739243084603325, + "learning_rate": 2.461482694374325e-06, + "loss": 0.5325, + "step": 7275 + }, + { + "epoch": 0.300239333168276, + "grad_norm": 6.974384825602823, + "learning_rate": 2.4613288103267817e-06, + "loss": 0.5959, + "step": 7276 + }, + { + "epoch": 0.3002805975076339, + "grad_norm": 3.4263474041044306, + "learning_rate": 2.461174909107501e-06, + "loss": 0.5177, + "step": 7277 + }, + { + "epoch": 0.30032186184699183, + "grad_norm": 3.2159647114555034, + "learning_rate": 2.4610209907192317e-06, + "loss": 0.5227, + "step": 7278 + }, + { + "epoch": 0.30036312618634975, + "grad_norm": 15.738175592751846, + "learning_rate": 2.460867055164724e-06, + "loss": 0.5414, + "step": 7279 + }, + { + "epoch": 0.30040439052570767, + "grad_norm": 4.0255621101640156, + "learning_rate": 2.4607131024467263e-06, + "loss": 0.5602, + "step": 7280 + }, + { + "epoch": 0.30044565486506564, + "grad_norm": 3.2214653655271985, + "learning_rate": 2.4605591325679896e-06, + "loss": 0.5678, + "step": 7281 + }, + { + "epoch": 0.30048691920442355, + "grad_norm": 2.998539072566533, + "learning_rate": 2.460405145531264e-06, + "loss": 0.5451, + "step": 7282 + }, + { + "epoch": 0.30052818354378147, + "grad_norm": 3.9699896323378177, + 
"learning_rate": 2.4602511413392995e-06, + "loss": 0.5253, + "step": 7283 + }, + { + "epoch": 0.3005694478831394, + "grad_norm": 3.2134073898281525, + "learning_rate": 2.460097119994848e-06, + "loss": 0.5428, + "step": 7284 + }, + { + "epoch": 0.3006107122224973, + "grad_norm": 2.463543757247137, + "learning_rate": 2.45994308150066e-06, + "loss": 0.5706, + "step": 7285 + }, + { + "epoch": 0.30065197656185527, + "grad_norm": 5.1619903963209355, + "learning_rate": 2.459789025859487e-06, + "loss": 0.6188, + "step": 7286 + }, + { + "epoch": 0.3006932409012132, + "grad_norm": 3.1178415432890763, + "learning_rate": 2.459634953074081e-06, + "loss": 0.5978, + "step": 7287 + }, + { + "epoch": 0.3007345052405711, + "grad_norm": 4.90372259168398, + "learning_rate": 2.459480863147194e-06, + "loss": 0.5443, + "step": 7288 + }, + { + "epoch": 0.300775769579929, + "grad_norm": 2.6518154893243993, + "learning_rate": 2.4593267560815786e-06, + "loss": 0.5882, + "step": 7289 + }, + { + "epoch": 0.30081703391928694, + "grad_norm": 3.2211127992570434, + "learning_rate": 2.4591726318799872e-06, + "loss": 0.5105, + "step": 7290 + }, + { + "epoch": 0.30085829825864485, + "grad_norm": 5.871360980820926, + "learning_rate": 2.4590184905451737e-06, + "loss": 0.536, + "step": 7291 + }, + { + "epoch": 0.3008995625980028, + "grad_norm": 2.1424958787160913, + "learning_rate": 2.4588643320798905e-06, + "loss": 0.484, + "step": 7292 + }, + { + "epoch": 0.30094082693736074, + "grad_norm": 5.186397238770398, + "learning_rate": 2.4587101564868918e-06, + "loss": 0.5129, + "step": 7293 + }, + { + "epoch": 0.30098209127671866, + "grad_norm": 2.826044939193216, + "learning_rate": 2.458555963768931e-06, + "loss": 0.536, + "step": 7294 + }, + { + "epoch": 0.30102335561607657, + "grad_norm": 2.2402815047018394, + "learning_rate": 2.4584017539287635e-06, + "loss": 0.5519, + "step": 7295 + }, + { + "epoch": 0.3010646199554345, + "grad_norm": 6.208783481562085, + "learning_rate": 2.458247526969142e-06, + "loss": 0.5556, + "step": 7296 + }, + { + "epoch": 0.30110588429479246, + "grad_norm": 3.3235688317908254, + "learning_rate": 2.4580932828928233e-06, + "loss": 0.5718, + "step": 7297 + }, + { + "epoch": 0.3011471486341504, + "grad_norm": 3.506999781585904, + "learning_rate": 2.4579390217025615e-06, + "loss": 0.5222, + "step": 7298 + }, + { + "epoch": 0.3011884129735083, + "grad_norm": 2.932350473868902, + "learning_rate": 2.457784743401113e-06, + "loss": 0.5486, + "step": 7299 + }, + { + "epoch": 0.3012296773128662, + "grad_norm": 6.088723127577506, + "learning_rate": 2.4576304479912323e-06, + "loss": 0.5518, + "step": 7300 + }, + { + "epoch": 0.3012709416522241, + "grad_norm": 4.1014240939535735, + "learning_rate": 2.457476135475676e-06, + "loss": 0.5484, + "step": 7301 + }, + { + "epoch": 0.3013122059915821, + "grad_norm": 4.564638270831675, + "learning_rate": 2.457321805857201e-06, + "loss": 0.5233, + "step": 7302 + }, + { + "epoch": 0.30135347033094, + "grad_norm": 2.451416679044176, + "learning_rate": 2.4571674591385638e-06, + "loss": 0.5519, + "step": 7303 + }, + { + "epoch": 0.3013947346702979, + "grad_norm": 3.7701814859711327, + "learning_rate": 2.4570130953225215e-06, + "loss": 0.5428, + "step": 7304 + }, + { + "epoch": 0.30143599900965584, + "grad_norm": 5.23180306715537, + "learning_rate": 2.456858714411831e-06, + "loss": 0.5298, + "step": 7305 + }, + { + "epoch": 0.30147726334901376, + "grad_norm": 6.495369775424129, + "learning_rate": 2.45670431640925e-06, + "loss": 0.4989, + "step": 7306 + }, + { + "epoch": 
0.30151852768837173, + "grad_norm": 3.0842444099787247, + "learning_rate": 2.4565499013175367e-06, + "loss": 0.5218, + "step": 7307 + }, + { + "epoch": 0.30155979202772965, + "grad_norm": 3.6516335727935556, + "learning_rate": 2.4563954691394494e-06, + "loss": 0.5544, + "step": 7308 + }, + { + "epoch": 0.30160105636708756, + "grad_norm": 3.535036208354836, + "learning_rate": 2.456241019877746e-06, + "loss": 0.5541, + "step": 7309 + }, + { + "epoch": 0.3016423207064455, + "grad_norm": 4.449971751192028, + "learning_rate": 2.4560865535351862e-06, + "loss": 0.5073, + "step": 7310 + }, + { + "epoch": 0.3016835850458034, + "grad_norm": 4.041010831389329, + "learning_rate": 2.4559320701145287e-06, + "loss": 0.524, + "step": 7311 + }, + { + "epoch": 0.30172484938516136, + "grad_norm": 10.82021438234284, + "learning_rate": 2.4557775696185336e-06, + "loss": 0.5593, + "step": 7312 + }, + { + "epoch": 0.3017661137245193, + "grad_norm": 3.4911169280009178, + "learning_rate": 2.4556230520499597e-06, + "loss": 0.5617, + "step": 7313 + }, + { + "epoch": 0.3018073780638772, + "grad_norm": 3.5473592348111476, + "learning_rate": 2.455468517411567e-06, + "loss": 0.5572, + "step": 7314 + }, + { + "epoch": 0.3018486424032351, + "grad_norm": 5.179874250960102, + "learning_rate": 2.455313965706117e-06, + "loss": 0.517, + "step": 7315 + }, + { + "epoch": 0.30188990674259303, + "grad_norm": 3.881059675222071, + "learning_rate": 2.4551593969363694e-06, + "loss": 0.5359, + "step": 7316 + }, + { + "epoch": 0.301931171081951, + "grad_norm": 5.468748023914768, + "learning_rate": 2.455004811105086e-06, + "loss": 0.6041, + "step": 7317 + }, + { + "epoch": 0.3019724354213089, + "grad_norm": 6.639872999341184, + "learning_rate": 2.4548502082150276e-06, + "loss": 0.565, + "step": 7318 + }, + { + "epoch": 0.30201369976066683, + "grad_norm": 4.8764324638242655, + "learning_rate": 2.454695588268955e-06, + "loss": 0.5723, + "step": 7319 + }, + { + "epoch": 0.30205496410002475, + "grad_norm": 2.7482303380446216, + "learning_rate": 2.4545409512696317e-06, + "loss": 0.5574, + "step": 7320 + }, + { + "epoch": 0.30209622843938266, + "grad_norm": 15.170744006800144, + "learning_rate": 2.4543862972198185e-06, + "loss": 0.6003, + "step": 7321 + }, + { + "epoch": 0.30213749277874064, + "grad_norm": 3.0070101095755453, + "learning_rate": 2.454231626122279e-06, + "loss": 0.4957, + "step": 7322 + }, + { + "epoch": 0.30217875711809855, + "grad_norm": 2.959350801140659, + "learning_rate": 2.4540769379797753e-06, + "loss": 0.5578, + "step": 7323 + }, + { + "epoch": 0.30222002145745647, + "grad_norm": 14.193371560937251, + "learning_rate": 2.453922232795071e-06, + "loss": 0.5742, + "step": 7324 + }, + { + "epoch": 0.3022612857968144, + "grad_norm": 2.9246881373930673, + "learning_rate": 2.4537675105709292e-06, + "loss": 0.5895, + "step": 7325 + }, + { + "epoch": 0.3023025501361723, + "grad_norm": 4.222147158544781, + "learning_rate": 2.4536127713101135e-06, + "loss": 0.5356, + "step": 7326 + }, + { + "epoch": 0.30234381447553027, + "grad_norm": 2.911476348868486, + "learning_rate": 2.453458015015388e-06, + "loss": 0.5145, + "step": 7327 + }, + { + "epoch": 0.3023850788148882, + "grad_norm": 5.085737896608673, + "learning_rate": 2.4533032416895175e-06, + "loss": 0.5029, + "step": 7328 + }, + { + "epoch": 0.3024263431542461, + "grad_norm": 2.8098506137675567, + "learning_rate": 2.453148451335266e-06, + "loss": 0.5518, + "step": 7329 + }, + { + "epoch": 0.302467607493604, + "grad_norm": 4.459298649284775, + "learning_rate": 2.452993643955399e-06, 
+ "loss": 0.5733, + "step": 7330 + }, + { + "epoch": 0.30250887183296193, + "grad_norm": 4.641989014958716, + "learning_rate": 2.4528388195526817e-06, + "loss": 0.5893, + "step": 7331 + }, + { + "epoch": 0.3025501361723199, + "grad_norm": 3.4546446207655586, + "learning_rate": 2.452683978129879e-06, + "loss": 0.615, + "step": 7332 + }, + { + "epoch": 0.3025914005116778, + "grad_norm": 1.9099404936160949, + "learning_rate": 2.4525291196897577e-06, + "loss": 0.5006, + "step": 7333 + }, + { + "epoch": 0.30263266485103574, + "grad_norm": 3.545768258996141, + "learning_rate": 2.4523742442350827e-06, + "loss": 0.5381, + "step": 7334 + }, + { + "epoch": 0.30267392919039365, + "grad_norm": 3.774764439669889, + "learning_rate": 2.452219351768622e-06, + "loss": 0.549, + "step": 7335 + }, + { + "epoch": 0.30271519352975157, + "grad_norm": 2.8894087717537573, + "learning_rate": 2.4520644422931413e-06, + "loss": 0.5632, + "step": 7336 + }, + { + "epoch": 0.30275645786910954, + "grad_norm": 2.550705409816509, + "learning_rate": 2.451909515811408e-06, + "loss": 0.5331, + "step": 7337 + }, + { + "epoch": 0.30279772220846746, + "grad_norm": 4.551082414309195, + "learning_rate": 2.4517545723261894e-06, + "loss": 0.5096, + "step": 7338 + }, + { + "epoch": 0.3028389865478254, + "grad_norm": 4.114086563475433, + "learning_rate": 2.4515996118402532e-06, + "loss": 0.5675, + "step": 7339 + }, + { + "epoch": 0.3028802508871833, + "grad_norm": 3.865959158382059, + "learning_rate": 2.4514446343563674e-06, + "loss": 0.5404, + "step": 7340 + }, + { + "epoch": 0.3029215152265412, + "grad_norm": 2.894065161963604, + "learning_rate": 2.4512896398773003e-06, + "loss": 0.521, + "step": 7341 + }, + { + "epoch": 0.3029627795658992, + "grad_norm": 6.111965264051844, + "learning_rate": 2.4511346284058202e-06, + "loss": 0.4876, + "step": 7342 + }, + { + "epoch": 0.3030040439052571, + "grad_norm": 3.6860807968572393, + "learning_rate": 2.4509795999446964e-06, + "loss": 0.5729, + "step": 7343 + }, + { + "epoch": 0.303045308244615, + "grad_norm": 2.618473354768518, + "learning_rate": 2.4508245544966987e-06, + "loss": 0.5617, + "step": 7344 + }, + { + "epoch": 0.3030865725839729, + "grad_norm": 4.059396962836604, + "learning_rate": 2.4506694920645947e-06, + "loss": 0.5049, + "step": 7345 + }, + { + "epoch": 0.30312783692333084, + "grad_norm": 3.560378855940176, + "learning_rate": 2.450514412651156e-06, + "loss": 0.4611, + "step": 7346 + }, + { + "epoch": 0.3031691012626888, + "grad_norm": 2.539502558028906, + "learning_rate": 2.450359316259152e-06, + "loss": 0.5793, + "step": 7347 + }, + { + "epoch": 0.3032103656020467, + "grad_norm": 4.971872838834708, + "learning_rate": 2.4502042028913527e-06, + "loss": 0.5285, + "step": 7348 + }, + { + "epoch": 0.30325162994140464, + "grad_norm": 3.4477193977606615, + "learning_rate": 2.45004907255053e-06, + "loss": 0.5563, + "step": 7349 + }, + { + "epoch": 0.30329289428076256, + "grad_norm": 3.453375030996255, + "learning_rate": 2.4498939252394536e-06, + "loss": 0.5425, + "step": 7350 + }, + { + "epoch": 0.3033341586201205, + "grad_norm": 2.35373998447392, + "learning_rate": 2.4497387609608962e-06, + "loss": 0.5228, + "step": 7351 + }, + { + "epoch": 0.3033754229594784, + "grad_norm": 14.044293529738406, + "learning_rate": 2.4495835797176284e-06, + "loss": 0.5679, + "step": 7352 + }, + { + "epoch": 0.30341668729883636, + "grad_norm": 4.032388677588394, + "learning_rate": 2.4494283815124216e-06, + "loss": 0.5667, + "step": 7353 + }, + { + "epoch": 0.3034579516381943, + "grad_norm": 
7.652906284588814, + "learning_rate": 2.4492731663480495e-06, + "loss": 0.5894, + "step": 7354 + }, + { + "epoch": 0.3034992159775522, + "grad_norm": 2.9942712305557366, + "learning_rate": 2.4491179342272835e-06, + "loss": 0.5605, + "step": 7355 + }, + { + "epoch": 0.3035404803169101, + "grad_norm": 4.587861601125717, + "learning_rate": 2.4489626851528973e-06, + "loss": 0.5938, + "step": 7356 + }, + { + "epoch": 0.303581744656268, + "grad_norm": 3.06907655727024, + "learning_rate": 2.448807419127663e-06, + "loss": 0.5468, + "step": 7357 + }, + { + "epoch": 0.303623008995626, + "grad_norm": 3.5915653867528907, + "learning_rate": 2.448652136154356e-06, + "loss": 0.5456, + "step": 7358 + }, + { + "epoch": 0.3036642733349839, + "grad_norm": 3.578082972803798, + "learning_rate": 2.448496836235748e-06, + "loss": 0.5116, + "step": 7359 + }, + { + "epoch": 0.30370553767434183, + "grad_norm": 2.273557815563811, + "learning_rate": 2.448341519374613e-06, + "loss": 0.5507, + "step": 7360 + }, + { + "epoch": 0.30374680201369975, + "grad_norm": 9.0279712977771, + "learning_rate": 2.4481861855737266e-06, + "loss": 0.5183, + "step": 7361 + }, + { + "epoch": 0.30378806635305766, + "grad_norm": 2.728177449149446, + "learning_rate": 2.448030834835863e-06, + "loss": 0.5191, + "step": 7362 + }, + { + "epoch": 0.30382933069241563, + "grad_norm": 4.333140871091913, + "learning_rate": 2.4478754671637966e-06, + "loss": 0.5522, + "step": 7363 + }, + { + "epoch": 0.30387059503177355, + "grad_norm": 4.775538676724609, + "learning_rate": 2.4477200825603035e-06, + "loss": 0.6091, + "step": 7364 + }, + { + "epoch": 0.30391185937113147, + "grad_norm": 4.098171677296712, + "learning_rate": 2.447564681028159e-06, + "loss": 0.5036, + "step": 7365 + }, + { + "epoch": 0.3039531237104894, + "grad_norm": 2.4611447639699904, + "learning_rate": 2.4474092625701387e-06, + "loss": 0.5717, + "step": 7366 + }, + { + "epoch": 0.3039943880498473, + "grad_norm": 2.641294519962719, + "learning_rate": 2.447253827189019e-06, + "loss": 0.5384, + "step": 7367 + }, + { + "epoch": 0.30403565238920527, + "grad_norm": 3.19108621503302, + "learning_rate": 2.4470983748875763e-06, + "loss": 0.5585, + "step": 7368 + }, + { + "epoch": 0.3040769167285632, + "grad_norm": 2.9706500713205184, + "learning_rate": 2.446942905668587e-06, + "loss": 0.5696, + "step": 7369 + }, + { + "epoch": 0.3041181810679211, + "grad_norm": 2.9556470642768655, + "learning_rate": 2.4467874195348295e-06, + "loss": 0.5878, + "step": 7370 + }, + { + "epoch": 0.304159445407279, + "grad_norm": 5.220896837804817, + "learning_rate": 2.4466319164890795e-06, + "loss": 0.5275, + "step": 7371 + }, + { + "epoch": 0.30420070974663693, + "grad_norm": 3.3273914624428818, + "learning_rate": 2.4464763965341154e-06, + "loss": 0.5181, + "step": 7372 + }, + { + "epoch": 0.3042419740859949, + "grad_norm": 3.5625749569739877, + "learning_rate": 2.446320859672715e-06, + "loss": 0.5719, + "step": 7373 + }, + { + "epoch": 0.3042832384253528, + "grad_norm": 3.088224588243024, + "learning_rate": 2.4461653059076566e-06, + "loss": 0.5998, + "step": 7374 + }, + { + "epoch": 0.30432450276471074, + "grad_norm": 3.1329432018725734, + "learning_rate": 2.4460097352417193e-06, + "loss": 0.522, + "step": 7375 + }, + { + "epoch": 0.30436576710406865, + "grad_norm": 3.4435702735387146, + "learning_rate": 2.445854147677681e-06, + "loss": 0.526, + "step": 7376 + }, + { + "epoch": 0.30440703144342657, + "grad_norm": 9.941916217252771, + "learning_rate": 2.4456985432183224e-06, + "loss": 0.5287, + "step": 7377 + }, + { 
+ "epoch": 0.30444829578278454, + "grad_norm": 4.738335349925625, + "learning_rate": 2.4455429218664214e-06, + "loss": 0.5281, + "step": 7378 + }, + { + "epoch": 0.30448956012214246, + "grad_norm": 2.903932594429993, + "learning_rate": 2.4453872836247584e-06, + "loss": 0.5912, + "step": 7379 + }, + { + "epoch": 0.30453082446150037, + "grad_norm": 4.816392153828908, + "learning_rate": 2.445231628496114e-06, + "loss": 0.5232, + "step": 7380 + }, + { + "epoch": 0.3045720888008583, + "grad_norm": 4.4161694100620235, + "learning_rate": 2.445075956483267e-06, + "loss": 0.5519, + "step": 7381 + }, + { + "epoch": 0.3046133531402162, + "grad_norm": 4.191276629518717, + "learning_rate": 2.4449202675890005e-06, + "loss": 0.5474, + "step": 7382 + }, + { + "epoch": 0.3046546174795742, + "grad_norm": 2.161762051189429, + "learning_rate": 2.4447645618160933e-06, + "loss": 0.5292, + "step": 7383 + }, + { + "epoch": 0.3046958818189321, + "grad_norm": 3.4069025835879665, + "learning_rate": 2.4446088391673282e-06, + "loss": 0.5607, + "step": 7384 + }, + { + "epoch": 0.30473714615829, + "grad_norm": 4.503706313470429, + "learning_rate": 2.444453099645486e-06, + "loss": 0.5272, + "step": 7385 + }, + { + "epoch": 0.3047784104976479, + "grad_norm": 3.182451398855997, + "learning_rate": 2.4442973432533487e-06, + "loss": 0.5156, + "step": 7386 + }, + { + "epoch": 0.30481967483700584, + "grad_norm": 8.435771201620051, + "learning_rate": 2.4441415699936983e-06, + "loss": 0.5337, + "step": 7387 + }, + { + "epoch": 0.3048609391763638, + "grad_norm": 10.474533311414707, + "learning_rate": 2.4439857798693175e-06, + "loss": 0.4885, + "step": 7388 + }, + { + "epoch": 0.3049022035157217, + "grad_norm": 10.678985746162962, + "learning_rate": 2.44382997288299e-06, + "loss": 0.5335, + "step": 7389 + }, + { + "epoch": 0.30494346785507964, + "grad_norm": 8.213714115849161, + "learning_rate": 2.4436741490374974e-06, + "loss": 0.5513, + "step": 7390 + }, + { + "epoch": 0.30498473219443756, + "grad_norm": 3.776261806184731, + "learning_rate": 2.4435183083356244e-06, + "loss": 0.5421, + "step": 7391 + }, + { + "epoch": 0.3050259965337955, + "grad_norm": 6.309189333292208, + "learning_rate": 2.4433624507801535e-06, + "loss": 0.6257, + "step": 7392 + }, + { + "epoch": 0.30506726087315345, + "grad_norm": 12.060250668010582, + "learning_rate": 2.4432065763738696e-06, + "loss": 0.5193, + "step": 7393 + }, + { + "epoch": 0.30510852521251136, + "grad_norm": 2.7803127885374486, + "learning_rate": 2.4430506851195567e-06, + "loss": 0.562, + "step": 7394 + }, + { + "epoch": 0.3051497895518693, + "grad_norm": 3.0411990214102134, + "learning_rate": 2.442894777019999e-06, + "loss": 0.5453, + "step": 7395 + }, + { + "epoch": 0.3051910538912272, + "grad_norm": 3.22241681290383, + "learning_rate": 2.4427388520779823e-06, + "loss": 0.5284, + "step": 7396 + }, + { + "epoch": 0.3052323182305851, + "grad_norm": 5.284525562157403, + "learning_rate": 2.4425829102962915e-06, + "loss": 0.55, + "step": 7397 + }, + { + "epoch": 0.3052735825699431, + "grad_norm": 6.3493060726158985, + "learning_rate": 2.442426951677711e-06, + "loss": 0.5657, + "step": 7398 + }, + { + "epoch": 0.305314846909301, + "grad_norm": 4.945387844000755, + "learning_rate": 2.4422709762250283e-06, + "loss": 0.4803, + "step": 7399 + }, + { + "epoch": 0.3053561112486589, + "grad_norm": 7.444222907191165, + "learning_rate": 2.4421149839410287e-06, + "loss": 0.5822, + "step": 7400 + }, + { + "epoch": 0.30539737558801683, + "grad_norm": 4.464373627621146, + "learning_rate": 
2.4419589748284987e-06, + "loss": 0.5377, + "step": 7401 + }, + { + "epoch": 0.30543863992737474, + "grad_norm": 4.978561463821525, + "learning_rate": 2.441802948890225e-06, + "loss": 0.556, + "step": 7402 + }, + { + "epoch": 0.3054799042667327, + "grad_norm": 2.9641049896871747, + "learning_rate": 2.4416469061289944e-06, + "loss": 0.5253, + "step": 7403 + }, + { + "epoch": 0.30552116860609063, + "grad_norm": 5.260439313206625, + "learning_rate": 2.4414908465475944e-06, + "loss": 0.5657, + "step": 7404 + }, + { + "epoch": 0.30556243294544855, + "grad_norm": 4.186960671224988, + "learning_rate": 2.4413347701488126e-06, + "loss": 0.5569, + "step": 7405 + }, + { + "epoch": 0.30560369728480646, + "grad_norm": 3.5168651850145145, + "learning_rate": 2.441178676935437e-06, + "loss": 0.5379, + "step": 7406 + }, + { + "epoch": 0.3056449616241644, + "grad_norm": 2.5917239657351567, + "learning_rate": 2.4410225669102562e-06, + "loss": 0.5643, + "step": 7407 + }, + { + "epoch": 0.30568622596352235, + "grad_norm": 2.699375195415711, + "learning_rate": 2.4408664400760577e-06, + "loss": 0.558, + "step": 7408 + }, + { + "epoch": 0.30572749030288027, + "grad_norm": 3.534374835119721, + "learning_rate": 2.440710296435631e-06, + "loss": 0.5369, + "step": 7409 + }, + { + "epoch": 0.3057687546422382, + "grad_norm": 3.4967071519188497, + "learning_rate": 2.4405541359917656e-06, + "loss": 0.5086, + "step": 7410 + }, + { + "epoch": 0.3058100189815961, + "grad_norm": 6.553177547808891, + "learning_rate": 2.4403979587472495e-06, + "loss": 0.5783, + "step": 7411 + }, + { + "epoch": 0.305851283320954, + "grad_norm": 4.328846191311636, + "learning_rate": 2.4402417647048744e-06, + "loss": 0.5366, + "step": 7412 + }, + { + "epoch": 0.30589254766031193, + "grad_norm": 2.458498139561035, + "learning_rate": 2.4400855538674284e-06, + "loss": 0.5674, + "step": 7413 + }, + { + "epoch": 0.3059338119996699, + "grad_norm": 5.036634636054688, + "learning_rate": 2.439929326237703e-06, + "loss": 0.5368, + "step": 7414 + }, + { + "epoch": 0.3059750763390278, + "grad_norm": 5.165309276755114, + "learning_rate": 2.4397730818184888e-06, + "loss": 0.5918, + "step": 7415 + }, + { + "epoch": 0.30601634067838573, + "grad_norm": 9.799074372349615, + "learning_rate": 2.439616820612576e-06, + "loss": 0.5226, + "step": 7416 + }, + { + "epoch": 0.30605760501774365, + "grad_norm": 6.095675450737737, + "learning_rate": 2.4394605426227563e-06, + "loss": 0.5538, + "step": 7417 + }, + { + "epoch": 0.30609886935710157, + "grad_norm": 4.926062755498334, + "learning_rate": 2.439304247851821e-06, + "loss": 0.5415, + "step": 7418 + }, + { + "epoch": 0.30614013369645954, + "grad_norm": 4.582469278090721, + "learning_rate": 2.4391479363025626e-06, + "loss": 0.5713, + "step": 7419 + }, + { + "epoch": 0.30618139803581745, + "grad_norm": 3.6973056265786326, + "learning_rate": 2.438991607977772e-06, + "loss": 0.5077, + "step": 7420 + }, + { + "epoch": 0.30622266237517537, + "grad_norm": 4.473337469290979, + "learning_rate": 2.438835262880242e-06, + "loss": 0.528, + "step": 7421 + }, + { + "epoch": 0.3062639267145333, + "grad_norm": 18.172956543707226, + "learning_rate": 2.4386789010127665e-06, + "loss": 0.562, + "step": 7422 + }, + { + "epoch": 0.3063051910538912, + "grad_norm": 3.1367977204489397, + "learning_rate": 2.4385225223781377e-06, + "loss": 0.5089, + "step": 7423 + }, + { + "epoch": 0.3063464553932492, + "grad_norm": 3.6724948468279064, + "learning_rate": 2.4383661269791485e-06, + "loss": 0.5677, + "step": 7424 + }, + { + "epoch": 0.3063877197326071, 
+ "grad_norm": 2.995232561908742, + "learning_rate": 2.438209714818593e-06, + "loss": 0.5217, + "step": 7425 + }, + { + "epoch": 0.306428984071965, + "grad_norm": 2.90193941269357, + "learning_rate": 2.4380532858992642e-06, + "loss": 0.5038, + "step": 7426 + }, + { + "epoch": 0.3064702484113229, + "grad_norm": 3.1950405974767, + "learning_rate": 2.437896840223958e-06, + "loss": 0.577, + "step": 7427 + }, + { + "epoch": 0.30651151275068084, + "grad_norm": 5.135088463574064, + "learning_rate": 2.437740377795467e-06, + "loss": 0.5584, + "step": 7428 + }, + { + "epoch": 0.3065527770900388, + "grad_norm": 4.043979456455347, + "learning_rate": 2.437583898616588e-06, + "loss": 0.5339, + "step": 7429 + }, + { + "epoch": 0.3065940414293967, + "grad_norm": 2.818331964476096, + "learning_rate": 2.4374274026901144e-06, + "loss": 0.553, + "step": 7430 + }, + { + "epoch": 0.30663530576875464, + "grad_norm": 6.408412950545515, + "learning_rate": 2.4372708900188427e-06, + "loss": 0.5037, + "step": 7431 + }, + { + "epoch": 0.30667657010811256, + "grad_norm": 20.27633900271968, + "learning_rate": 2.4371143606055683e-06, + "loss": 0.5516, + "step": 7432 + }, + { + "epoch": 0.30671783444747047, + "grad_norm": 5.698499663036369, + "learning_rate": 2.4369578144530866e-06, + "loss": 0.4997, + "step": 7433 + }, + { + "epoch": 0.30675909878682844, + "grad_norm": 3.8235525396265784, + "learning_rate": 2.4368012515641943e-06, + "loss": 0.5592, + "step": 7434 + }, + { + "epoch": 0.30680036312618636, + "grad_norm": 2.2111893975141848, + "learning_rate": 2.4366446719416885e-06, + "loss": 0.5448, + "step": 7435 + }, + { + "epoch": 0.3068416274655443, + "grad_norm": 3.060916533446601, + "learning_rate": 2.436488075588366e-06, + "loss": 0.5182, + "step": 7436 + }, + { + "epoch": 0.3068828918049022, + "grad_norm": 18.107528758024465, + "learning_rate": 2.436331462507024e-06, + "loss": 0.5387, + "step": 7437 + }, + { + "epoch": 0.3069241561442601, + "grad_norm": 3.132530293827925, + "learning_rate": 2.4361748327004592e-06, + "loss": 0.5381, + "step": 7438 + }, + { + "epoch": 0.3069654204836181, + "grad_norm": 2.6623614396752515, + "learning_rate": 2.4360181861714697e-06, + "loss": 0.6101, + "step": 7439 + }, + { + "epoch": 0.307006684822976, + "grad_norm": 2.4715455965025868, + "learning_rate": 2.4358615229228536e-06, + "loss": 0.5088, + "step": 7440 + }, + { + "epoch": 0.3070479491623339, + "grad_norm": 6.3411884209376, + "learning_rate": 2.4357048429574105e-06, + "loss": 0.5465, + "step": 7441 + }, + { + "epoch": 0.3070892135016918, + "grad_norm": 3.5430607072933906, + "learning_rate": 2.4355481462779375e-06, + "loss": 0.5488, + "step": 7442 + }, + { + "epoch": 0.30713047784104974, + "grad_norm": 2.954345262464635, + "learning_rate": 2.4353914328872342e-06, + "loss": 0.5674, + "step": 7443 + }, + { + "epoch": 0.3071717421804077, + "grad_norm": 2.763029034913119, + "learning_rate": 2.4352347027881005e-06, + "loss": 0.595, + "step": 7444 + }, + { + "epoch": 0.30721300651976563, + "grad_norm": 4.013849085592381, + "learning_rate": 2.435077955983335e-06, + "loss": 0.558, + "step": 7445 + }, + { + "epoch": 0.30725427085912355, + "grad_norm": 2.8102222868929094, + "learning_rate": 2.434921192475738e-06, + "loss": 0.5514, + "step": 7446 + }, + { + "epoch": 0.30729553519848146, + "grad_norm": 3.254501896215857, + "learning_rate": 2.4347644122681097e-06, + "loss": 0.5201, + "step": 7447 + }, + { + "epoch": 0.3073367995378394, + "grad_norm": 1.704054029102033, + "learning_rate": 2.4346076153632502e-06, + "loss": 0.4778, + "step": 
7448 + }, + { + "epoch": 0.30737806387719735, + "grad_norm": 3.214188096508404, + "learning_rate": 2.4344508017639613e-06, + "loss": 0.5648, + "step": 7449 + }, + { + "epoch": 0.30741932821655527, + "grad_norm": 2.226482218745903, + "learning_rate": 2.4342939714730433e-06, + "loss": 0.5591, + "step": 7450 + }, + { + "epoch": 0.3074605925559132, + "grad_norm": 4.6516183994737474, + "learning_rate": 2.4341371244932977e-06, + "loss": 0.549, + "step": 7451 + }, + { + "epoch": 0.3075018568952711, + "grad_norm": 4.751133043153464, + "learning_rate": 2.433980260827526e-06, + "loss": 0.5796, + "step": 7452 + }, + { + "epoch": 0.307543121234629, + "grad_norm": 2.669353602545147, + "learning_rate": 2.4338233804785304e-06, + "loss": 0.5414, + "step": 7453 + }, + { + "epoch": 0.307584385573987, + "grad_norm": 2.5489624044911117, + "learning_rate": 2.433666483449113e-06, + "loss": 0.4786, + "step": 7454 + }, + { + "epoch": 0.3076256499133449, + "grad_norm": 2.865244604869039, + "learning_rate": 2.433509569742077e-06, + "loss": 0.5699, + "step": 7455 + }, + { + "epoch": 0.3076669142527028, + "grad_norm": 2.9006770549713528, + "learning_rate": 2.4333526393602246e-06, + "loss": 0.5596, + "step": 7456 + }, + { + "epoch": 0.30770817859206073, + "grad_norm": 3.6563043574253866, + "learning_rate": 2.4331956923063594e-06, + "loss": 0.5785, + "step": 7457 + }, + { + "epoch": 0.30774944293141865, + "grad_norm": 3.1118763974630994, + "learning_rate": 2.4330387285832844e-06, + "loss": 0.4904, + "step": 7458 + }, + { + "epoch": 0.3077907072707766, + "grad_norm": 3.163717021651923, + "learning_rate": 2.4328817481938036e-06, + "loss": 0.5265, + "step": 7459 + }, + { + "epoch": 0.30783197161013454, + "grad_norm": 7.402126979596714, + "learning_rate": 2.432724751140721e-06, + "loss": 0.5677, + "step": 7460 + }, + { + "epoch": 0.30787323594949245, + "grad_norm": 2.572099510932186, + "learning_rate": 2.432567737426841e-06, + "loss": 0.5101, + "step": 7461 + }, + { + "epoch": 0.30791450028885037, + "grad_norm": 3.877719437299149, + "learning_rate": 2.432410707054968e-06, + "loss": 0.565, + "step": 7462 + }, + { + "epoch": 0.3079557646282083, + "grad_norm": 2.581022815202969, + "learning_rate": 2.4322536600279076e-06, + "loss": 0.5007, + "step": 7463 + }, + { + "epoch": 0.30799702896756626, + "grad_norm": 3.34381213844746, + "learning_rate": 2.432096596348465e-06, + "loss": 0.5334, + "step": 7464 + }, + { + "epoch": 0.30803829330692417, + "grad_norm": 3.7707567511973368, + "learning_rate": 2.431939516019445e-06, + "loss": 0.5297, + "step": 7465 + }, + { + "epoch": 0.3080795576462821, + "grad_norm": 3.167178585071308, + "learning_rate": 2.431782419043654e-06, + "loss": 0.5466, + "step": 7466 + }, + { + "epoch": 0.30812082198564, + "grad_norm": 2.860028553162379, + "learning_rate": 2.431625305423898e-06, + "loss": 0.575, + "step": 7467 + }, + { + "epoch": 0.3081620863249979, + "grad_norm": 4.992646312190136, + "learning_rate": 2.4314681751629837e-06, + "loss": 0.5119, + "step": 7468 + }, + { + "epoch": 0.3082033506643559, + "grad_norm": 3.2765031219500536, + "learning_rate": 2.4313110282637175e-06, + "loss": 0.5569, + "step": 7469 + }, + { + "epoch": 0.3082446150037138, + "grad_norm": 2.7912189160661383, + "learning_rate": 2.4311538647289066e-06, + "loss": 0.5298, + "step": 7470 + }, + { + "epoch": 0.3082858793430717, + "grad_norm": 6.233664334732093, + "learning_rate": 2.430996684561358e-06, + "loss": 0.5067, + "step": 7471 + }, + { + "epoch": 0.30832714368242964, + "grad_norm": 3.88460043967256, + "learning_rate": 
2.4308394877638796e-06, + "loss": 0.5774, + "step": 7472 + }, + { + "epoch": 0.30836840802178755, + "grad_norm": 3.4442455271761103, + "learning_rate": 2.4306822743392796e-06, + "loss": 0.508, + "step": 7473 + }, + { + "epoch": 0.30840967236114547, + "grad_norm": 4.742290480039093, + "learning_rate": 2.4305250442903655e-06, + "loss": 0.615, + "step": 7474 + }, + { + "epoch": 0.30845093670050344, + "grad_norm": 4.2599489409478695, + "learning_rate": 2.430367797619947e-06, + "loss": 0.5657, + "step": 7475 + }, + { + "epoch": 0.30849220103986136, + "grad_norm": 4.65391103677403, + "learning_rate": 2.4302105343308324e-06, + "loss": 0.5575, + "step": 7476 + }, + { + "epoch": 0.3085334653792193, + "grad_norm": 12.365310411619934, + "learning_rate": 2.43005325442583e-06, + "loss": 0.5612, + "step": 7477 + }, + { + "epoch": 0.3085747297185772, + "grad_norm": 2.4646513142909923, + "learning_rate": 2.42989595790775e-06, + "loss": 0.5422, + "step": 7478 + }, + { + "epoch": 0.3086159940579351, + "grad_norm": 2.4720016285970554, + "learning_rate": 2.429738644779402e-06, + "loss": 0.5747, + "step": 7479 + }, + { + "epoch": 0.3086572583972931, + "grad_norm": 4.844600936787659, + "learning_rate": 2.4295813150435956e-06, + "loss": 0.5222, + "step": 7480 + }, + { + "epoch": 0.308698522736651, + "grad_norm": 4.221125936534252, + "learning_rate": 2.429423968703142e-06, + "loss": 0.5121, + "step": 7481 + }, + { + "epoch": 0.3087397870760089, + "grad_norm": 3.322852914307926, + "learning_rate": 2.429266605760851e-06, + "loss": 0.5727, + "step": 7482 + }, + { + "epoch": 0.3087810514153668, + "grad_norm": 2.739561398221397, + "learning_rate": 2.4291092262195335e-06, + "loss": 0.5286, + "step": 7483 + }, + { + "epoch": 0.30882231575472474, + "grad_norm": 2.916528475941482, + "learning_rate": 2.428951830082001e-06, + "loss": 0.4885, + "step": 7484 + }, + { + "epoch": 0.3088635800940827, + "grad_norm": 3.9282192962490066, + "learning_rate": 2.4287944173510654e-06, + "loss": 0.5764, + "step": 7485 + }, + { + "epoch": 0.30890484443344063, + "grad_norm": 2.7849422204704344, + "learning_rate": 2.4286369880295376e-06, + "loss": 0.5921, + "step": 7486 + }, + { + "epoch": 0.30894610877279854, + "grad_norm": 2.9653311886791314, + "learning_rate": 2.4284795421202302e-06, + "loss": 0.5185, + "step": 7487 + }, + { + "epoch": 0.30898737311215646, + "grad_norm": 3.043530946943918, + "learning_rate": 2.4283220796259555e-06, + "loss": 0.553, + "step": 7488 + }, + { + "epoch": 0.3090286374515144, + "grad_norm": 6.6057321374629, + "learning_rate": 2.428164600549526e-06, + "loss": 0.5047, + "step": 7489 + }, + { + "epoch": 0.30906990179087235, + "grad_norm": 3.623699177366267, + "learning_rate": 2.428007104893755e-06, + "loss": 0.5746, + "step": 7490 + }, + { + "epoch": 0.30911116613023026, + "grad_norm": 3.4537450356224544, + "learning_rate": 2.427849592661456e-06, + "loss": 0.61, + "step": 7491 + }, + { + "epoch": 0.3091524304695882, + "grad_norm": 13.443219922591078, + "learning_rate": 2.4276920638554416e-06, + "loss": 0.51, + "step": 7492 + }, + { + "epoch": 0.3091936948089461, + "grad_norm": 2.4669384558423206, + "learning_rate": 2.427534518478526e-06, + "loss": 0.5963, + "step": 7493 + }, + { + "epoch": 0.309234959148304, + "grad_norm": 5.149075674762797, + "learning_rate": 2.427376956533524e-06, + "loss": 0.5603, + "step": 7494 + }, + { + "epoch": 0.309276223487662, + "grad_norm": 5.5100290672394765, + "learning_rate": 2.42721937802325e-06, + "loss": 0.557, + "step": 7495 + }, + { + "epoch": 0.3093174878270199, + "grad_norm": 
3.0968889270773983, + "learning_rate": 2.4270617829505176e-06, + "loss": 0.561, + "step": 7496 + }, + { + "epoch": 0.3093587521663778, + "grad_norm": 2.6749010357965144, + "learning_rate": 2.4269041713181426e-06, + "loss": 0.5053, + "step": 7497 + }, + { + "epoch": 0.30940001650573573, + "grad_norm": 2.5965140594147775, + "learning_rate": 2.426746543128941e-06, + "loss": 0.5589, + "step": 7498 + }, + { + "epoch": 0.30944128084509365, + "grad_norm": 3.888858333292009, + "learning_rate": 2.4265888983857273e-06, + "loss": 0.5131, + "step": 7499 + }, + { + "epoch": 0.3094825451844516, + "grad_norm": 2.571786573425745, + "learning_rate": 2.4264312370913177e-06, + "loss": 0.5433, + "step": 7500 + }, + { + "epoch": 0.30952380952380953, + "grad_norm": 7.585630049737628, + "learning_rate": 2.426273559248529e-06, + "loss": 0.5135, + "step": 7501 + }, + { + "epoch": 0.30956507386316745, + "grad_norm": 4.472649821113481, + "learning_rate": 2.4261158648601775e-06, + "loss": 0.5852, + "step": 7502 + }, + { + "epoch": 0.30960633820252537, + "grad_norm": 3.549934008805254, + "learning_rate": 2.42595815392908e-06, + "loss": 0.5881, + "step": 7503 + }, + { + "epoch": 0.3096476025418833, + "grad_norm": 2.2795485094078063, + "learning_rate": 2.4258004264580527e-06, + "loss": 0.5262, + "step": 7504 + }, + { + "epoch": 0.30968886688124125, + "grad_norm": 6.5472039170901395, + "learning_rate": 2.4256426824499144e-06, + "loss": 0.5584, + "step": 7505 + }, + { + "epoch": 0.30973013122059917, + "grad_norm": 2.715402270817933, + "learning_rate": 2.425484921907482e-06, + "loss": 0.5417, + "step": 7506 + }, + { + "epoch": 0.3097713955599571, + "grad_norm": 3.941084641254052, + "learning_rate": 2.4253271448335737e-06, + "loss": 0.5526, + "step": 7507 + }, + { + "epoch": 0.309812659899315, + "grad_norm": 4.819258128517645, + "learning_rate": 2.4251693512310075e-06, + "loss": 0.4942, + "step": 7508 + }, + { + "epoch": 0.3098539242386729, + "grad_norm": 13.627623788254091, + "learning_rate": 2.425011541102603e-06, + "loss": 0.5729, + "step": 7509 + }, + { + "epoch": 0.3098951885780309, + "grad_norm": 2.4940436852864916, + "learning_rate": 2.424853714451178e-06, + "loss": 0.4995, + "step": 7510 + }, + { + "epoch": 0.3099364529173888, + "grad_norm": 3.2493959730222857, + "learning_rate": 2.4246958712795516e-06, + "loss": 0.5164, + "step": 7511 + }, + { + "epoch": 0.3099777172567467, + "grad_norm": 7.725640659445766, + "learning_rate": 2.424538011590544e-06, + "loss": 0.6086, + "step": 7512 + }, + { + "epoch": 0.31001898159610464, + "grad_norm": 9.267431459858704, + "learning_rate": 2.4243801353869743e-06, + "loss": 0.5103, + "step": 7513 + }, + { + "epoch": 0.31006024593546255, + "grad_norm": 3.0039217551827444, + "learning_rate": 2.4242222426716633e-06, + "loss": 0.548, + "step": 7514 + }, + { + "epoch": 0.3101015102748205, + "grad_norm": 2.2654520628390116, + "learning_rate": 2.4240643334474315e-06, + "loss": 0.5386, + "step": 7515 + }, + { + "epoch": 0.31014277461417844, + "grad_norm": 2.874548278890898, + "learning_rate": 2.4239064077170984e-06, + "loss": 0.5221, + "step": 7516 + }, + { + "epoch": 0.31018403895353636, + "grad_norm": 5.463185534004395, + "learning_rate": 2.423748465483486e-06, + "loss": 0.5547, + "step": 7517 + }, + { + "epoch": 0.31022530329289427, + "grad_norm": 2.332546610483865, + "learning_rate": 2.4235905067494146e-06, + "loss": 0.5586, + "step": 7518 + }, + { + "epoch": 0.3102665676322522, + "grad_norm": 4.943276635239979, + "learning_rate": 2.4234325315177064e-06, + "loss": 0.551, + "step": 7519 
+ }, + { + "epoch": 0.31030783197161016, + "grad_norm": 2.506070747106054, + "learning_rate": 2.4232745397911836e-06, + "loss": 0.575, + "step": 7520 + }, + { + "epoch": 0.3103490963109681, + "grad_norm": 2.531649195550814, + "learning_rate": 2.4231165315726676e-06, + "loss": 0.5696, + "step": 7521 + }, + { + "epoch": 0.310390360650326, + "grad_norm": 3.776591534719658, + "learning_rate": 2.422958506864981e-06, + "loss": 0.577, + "step": 7522 + }, + { + "epoch": 0.3104316249896839, + "grad_norm": 5.3040821872245205, + "learning_rate": 2.422800465670947e-06, + "loss": 0.5432, + "step": 7523 + }, + { + "epoch": 0.3104728893290418, + "grad_norm": 14.370241881195675, + "learning_rate": 2.422642407993387e-06, + "loss": 0.5886, + "step": 7524 + }, + { + "epoch": 0.3105141536683998, + "grad_norm": 5.482863154743763, + "learning_rate": 2.4224843338351265e-06, + "loss": 0.5386, + "step": 7525 + }, + { + "epoch": 0.3105554180077577, + "grad_norm": 2.44854008835658, + "learning_rate": 2.422326243198988e-06, + "loss": 0.4926, + "step": 7526 + }, + { + "epoch": 0.3105966823471156, + "grad_norm": 6.371831706292772, + "learning_rate": 2.4221681360877953e-06, + "loss": 0.5551, + "step": 7527 + }, + { + "epoch": 0.31063794668647354, + "grad_norm": 4.541088670469246, + "learning_rate": 2.4220100125043727e-06, + "loss": 0.5512, + "step": 7528 + }, + { + "epoch": 0.31067921102583146, + "grad_norm": 3.5050610578655497, + "learning_rate": 2.421851872451545e-06, + "loss": 0.5555, + "step": 7529 + }, + { + "epoch": 0.31072047536518943, + "grad_norm": 2.7129068462065224, + "learning_rate": 2.4216937159321367e-06, + "loss": 0.5576, + "step": 7530 + }, + { + "epoch": 0.31076173970454735, + "grad_norm": 2.9184356381099383, + "learning_rate": 2.421535542948973e-06, + "loss": 0.6136, + "step": 7531 + }, + { + "epoch": 0.31080300404390526, + "grad_norm": 5.610287239534076, + "learning_rate": 2.4213773535048788e-06, + "loss": 0.5204, + "step": 7532 + }, + { + "epoch": 0.3108442683832632, + "grad_norm": 8.161402809753675, + "learning_rate": 2.4212191476026808e-06, + "loss": 0.476, + "step": 7533 + }, + { + "epoch": 0.3108855327226211, + "grad_norm": 3.146113134554444, + "learning_rate": 2.4210609252452034e-06, + "loss": 0.5412, + "step": 7534 + }, + { + "epoch": 0.310926797061979, + "grad_norm": 3.201206816669266, + "learning_rate": 2.4209026864352743e-06, + "loss": 0.5123, + "step": 7535 + }, + { + "epoch": 0.310968061401337, + "grad_norm": 2.711121790910817, + "learning_rate": 2.420744431175719e-06, + "loss": 0.4683, + "step": 7536 + }, + { + "epoch": 0.3110093257406949, + "grad_norm": 82.64207998892762, + "learning_rate": 2.4205861594693656e-06, + "loss": 0.5434, + "step": 7537 + }, + { + "epoch": 0.3110505900800528, + "grad_norm": 8.486914614713768, + "learning_rate": 2.42042787131904e-06, + "loss": 0.5669, + "step": 7538 + }, + { + "epoch": 0.31109185441941073, + "grad_norm": 5.647727332846064, + "learning_rate": 2.42026956672757e-06, + "loss": 0.6025, + "step": 7539 + }, + { + "epoch": 0.31113311875876865, + "grad_norm": 3.602213822337891, + "learning_rate": 2.420111245697783e-06, + "loss": 0.5053, + "step": 7540 + }, + { + "epoch": 0.3111743830981266, + "grad_norm": 4.412788877294454, + "learning_rate": 2.419952908232508e-06, + "loss": 0.5422, + "step": 7541 + }, + { + "epoch": 0.31121564743748453, + "grad_norm": 3.239812874917074, + "learning_rate": 2.4197945543345726e-06, + "loss": 0.5597, + "step": 7542 + }, + { + "epoch": 0.31125691177684245, + "grad_norm": 22.62065551063839, + "learning_rate": 
2.4196361840068055e-06, + "loss": 0.5579, + "step": 7543 + }, + { + "epoch": 0.31129817611620036, + "grad_norm": 3.733215976520678, + "learning_rate": 2.4194777972520356e-06, + "loss": 0.5255, + "step": 7544 + }, + { + "epoch": 0.3113394404555583, + "grad_norm": 8.32603426586659, + "learning_rate": 2.419319394073092e-06, + "loss": 0.5099, + "step": 7545 + }, + { + "epoch": 0.31138070479491625, + "grad_norm": 2.6879153423368884, + "learning_rate": 2.419160974472804e-06, + "loss": 0.5591, + "step": 7546 + }, + { + "epoch": 0.31142196913427417, + "grad_norm": 2.2369451048744464, + "learning_rate": 2.4190025384540018e-06, + "loss": 0.5822, + "step": 7547 + }, + { + "epoch": 0.3114632334736321, + "grad_norm": 3.529773237769835, + "learning_rate": 2.4188440860195153e-06, + "loss": 0.5015, + "step": 7548 + }, + { + "epoch": 0.31150449781299, + "grad_norm": 3.6244320271365695, + "learning_rate": 2.4186856171721747e-06, + "loss": 0.5448, + "step": 7549 + }, + { + "epoch": 0.3115457621523479, + "grad_norm": 6.762736613135897, + "learning_rate": 2.418527131914811e-06, + "loss": 0.5174, + "step": 7550 + }, + { + "epoch": 0.3115870264917059, + "grad_norm": 2.6917606555336806, + "learning_rate": 2.4183686302502545e-06, + "loss": 0.538, + "step": 7551 + }, + { + "epoch": 0.3116282908310638, + "grad_norm": 4.313417853140491, + "learning_rate": 2.4182101121813373e-06, + "loss": 0.4947, + "step": 7552 + }, + { + "epoch": 0.3116695551704217, + "grad_norm": 3.254473753478525, + "learning_rate": 2.4180515777108904e-06, + "loss": 0.5402, + "step": 7553 + }, + { + "epoch": 0.31171081950977964, + "grad_norm": 3.045748804683389, + "learning_rate": 2.4178930268417454e-06, + "loss": 0.5459, + "step": 7554 + }, + { + "epoch": 0.31175208384913755, + "grad_norm": 3.3510268781645776, + "learning_rate": 2.417734459576735e-06, + "loss": 0.5594, + "step": 7555 + }, + { + "epoch": 0.3117933481884955, + "grad_norm": 4.634209663877346, + "learning_rate": 2.4175758759186917e-06, + "loss": 0.5619, + "step": 7556 + }, + { + "epoch": 0.31183461252785344, + "grad_norm": 7.197974567720816, + "learning_rate": 2.417417275870447e-06, + "loss": 0.5357, + "step": 7557 + }, + { + "epoch": 0.31187587686721135, + "grad_norm": 6.7661579607880595, + "learning_rate": 2.417258659434835e-06, + "loss": 0.5067, + "step": 7558 + }, + { + "epoch": 0.31191714120656927, + "grad_norm": 4.661076527492218, + "learning_rate": 2.4171000266146893e-06, + "loss": 0.5547, + "step": 7559 + }, + { + "epoch": 0.3119584055459272, + "grad_norm": 5.075149758819966, + "learning_rate": 2.4169413774128423e-06, + "loss": 0.4933, + "step": 7560 + }, + { + "epoch": 0.31199966988528516, + "grad_norm": 5.311376588319034, + "learning_rate": 2.4167827118321285e-06, + "loss": 0.5274, + "step": 7561 + }, + { + "epoch": 0.3120409342246431, + "grad_norm": 3.0197455877031607, + "learning_rate": 2.4166240298753824e-06, + "loss": 0.5719, + "step": 7562 + }, + { + "epoch": 0.312082198564001, + "grad_norm": 4.351894435365431, + "learning_rate": 2.4164653315454377e-06, + "loss": 0.5206, + "step": 7563 + }, + { + "epoch": 0.3121234629033589, + "grad_norm": 2.658564917300824, + "learning_rate": 2.4163066168451296e-06, + "loss": 0.5784, + "step": 7564 + }, + { + "epoch": 0.3121647272427168, + "grad_norm": 3.8908980425681836, + "learning_rate": 2.4161478857772933e-06, + "loss": 0.6409, + "step": 7565 + }, + { + "epoch": 0.3122059915820748, + "grad_norm": 3.5416785825045767, + "learning_rate": 2.4159891383447636e-06, + "loss": 0.5599, + "step": 7566 + }, + { + "epoch": 0.3122472559214327, 
+ "grad_norm": 3.4446973978079054, + "learning_rate": 2.4158303745503763e-06, + "loss": 0.5788, + "step": 7567 + }, + { + "epoch": 0.3122885202607906, + "grad_norm": 4.11710890414605, + "learning_rate": 2.415671594396968e-06, + "loss": 0.4868, + "step": 7568 + }, + { + "epoch": 0.31232978460014854, + "grad_norm": 3.7259176470242608, + "learning_rate": 2.415512797887374e-06, + "loss": 0.55, + "step": 7569 + }, + { + "epoch": 0.31237104893950646, + "grad_norm": 3.5044002137802384, + "learning_rate": 2.4153539850244314e-06, + "loss": 0.5477, + "step": 7570 + }, + { + "epoch": 0.31241231327886443, + "grad_norm": 3.406716304566372, + "learning_rate": 2.4151951558109765e-06, + "loss": 0.5499, + "step": 7571 + }, + { + "epoch": 0.31245357761822234, + "grad_norm": 5.063159328999626, + "learning_rate": 2.4150363102498467e-06, + "loss": 0.581, + "step": 7572 + }, + { + "epoch": 0.31249484195758026, + "grad_norm": 3.539119841495615, + "learning_rate": 2.414877448343879e-06, + "loss": 0.535, + "step": 7573 + }, + { + "epoch": 0.3125361062969382, + "grad_norm": 3.0437306906100283, + "learning_rate": 2.4147185700959123e-06, + "loss": 0.5723, + "step": 7574 + }, + { + "epoch": 0.3125773706362961, + "grad_norm": 3.025033420603958, + "learning_rate": 2.414559675508783e-06, + "loss": 0.5216, + "step": 7575 + }, + { + "epoch": 0.31261863497565406, + "grad_norm": 3.8190263724540987, + "learning_rate": 2.41440076458533e-06, + "loss": 0.5008, + "step": 7576 + }, + { + "epoch": 0.312659899315012, + "grad_norm": 3.3228421991014696, + "learning_rate": 2.4142418373283923e-06, + "loss": 0.5717, + "step": 7577 + }, + { + "epoch": 0.3127011636543699, + "grad_norm": 3.0033572432328195, + "learning_rate": 2.4140828937408082e-06, + "loss": 0.5267, + "step": 7578 + }, + { + "epoch": 0.3127424279937278, + "grad_norm": 17.038671472856265, + "learning_rate": 2.4139239338254164e-06, + "loss": 0.599, + "step": 7579 + }, + { + "epoch": 0.3127836923330857, + "grad_norm": 3.124835687382987, + "learning_rate": 2.4137649575850574e-06, + "loss": 0.4872, + "step": 7580 + }, + { + "epoch": 0.3128249566724437, + "grad_norm": 4.773575635978389, + "learning_rate": 2.4136059650225705e-06, + "loss": 0.5513, + "step": 7581 + }, + { + "epoch": 0.3128662210118016, + "grad_norm": 3.277685702830978, + "learning_rate": 2.4134469561407952e-06, + "loss": 0.5362, + "step": 7582 + }, + { + "epoch": 0.31290748535115953, + "grad_norm": 3.005704541958763, + "learning_rate": 2.413287930942572e-06, + "loss": 0.5234, + "step": 7583 + }, + { + "epoch": 0.31294874969051745, + "grad_norm": 18.073494355846, + "learning_rate": 2.413128889430742e-06, + "loss": 0.5171, + "step": 7584 + }, + { + "epoch": 0.31299001402987536, + "grad_norm": 5.521124713301717, + "learning_rate": 2.4129698316081455e-06, + "loss": 0.5167, + "step": 7585 + }, + { + "epoch": 0.31303127836923333, + "grad_norm": 92.53817381786466, + "learning_rate": 2.412810757477624e-06, + "loss": 0.5747, + "step": 7586 + }, + { + "epoch": 0.31307254270859125, + "grad_norm": 2.545804888786934, + "learning_rate": 2.412651667042019e-06, + "loss": 0.5127, + "step": 7587 + }, + { + "epoch": 0.31311380704794917, + "grad_norm": 3.0938790608086566, + "learning_rate": 2.412492560304172e-06, + "loss": 0.5526, + "step": 7588 + }, + { + "epoch": 0.3131550713873071, + "grad_norm": 2.825017157228484, + "learning_rate": 2.4123334372669253e-06, + "loss": 0.5549, + "step": 7589 + }, + { + "epoch": 0.313196335726665, + "grad_norm": 2.3540426708032838, + "learning_rate": 2.412174297933121e-06, + "loss": 0.529, + "step": 
7590 + }, + { + "epoch": 0.31323760006602297, + "grad_norm": 7.350438920541301, + "learning_rate": 2.412015142305602e-06, + "loss": 0.5361, + "step": 7591 + }, + { + "epoch": 0.3132788644053809, + "grad_norm": 2.7666751873740627, + "learning_rate": 2.4118559703872104e-06, + "loss": 0.4812, + "step": 7592 + }, + { + "epoch": 0.3133201287447388, + "grad_norm": 4.72180376112005, + "learning_rate": 2.411696782180791e-06, + "loss": 0.5326, + "step": 7593 + }, + { + "epoch": 0.3133613930840967, + "grad_norm": 4.83174077992989, + "learning_rate": 2.4115375776891858e-06, + "loss": 0.5621, + "step": 7594 + }, + { + "epoch": 0.31340265742345463, + "grad_norm": 2.326319542945992, + "learning_rate": 2.411378356915239e-06, + "loss": 0.5245, + "step": 7595 + }, + { + "epoch": 0.3134439217628126, + "grad_norm": 2.595943679881337, + "learning_rate": 2.4112191198617954e-06, + "loss": 0.5526, + "step": 7596 + }, + { + "epoch": 0.3134851861021705, + "grad_norm": 4.7916970301622275, + "learning_rate": 2.4110598665316982e-06, + "loss": 0.5779, + "step": 7597 + }, + { + "epoch": 0.31352645044152844, + "grad_norm": 5.837681772414875, + "learning_rate": 2.410900596927793e-06, + "loss": 0.5491, + "step": 7598 + }, + { + "epoch": 0.31356771478088635, + "grad_norm": 2.838821959487851, + "learning_rate": 2.410741311052924e-06, + "loss": 0.5566, + "step": 7599 + }, + { + "epoch": 0.31360897912024427, + "grad_norm": 3.634288556579592, + "learning_rate": 2.4105820089099372e-06, + "loss": 0.5037, + "step": 7600 + }, + { + "epoch": 0.3136502434596022, + "grad_norm": 3.1514489785419486, + "learning_rate": 2.410422690501678e-06, + "loss": 0.5735, + "step": 7601 + }, + { + "epoch": 0.31369150779896016, + "grad_norm": 3.5587445680212726, + "learning_rate": 2.410263355830992e-06, + "loss": 0.513, + "step": 7602 + }, + { + "epoch": 0.3137327721383181, + "grad_norm": 7.981687046836603, + "learning_rate": 2.4101040049007252e-06, + "loss": 0.5242, + "step": 7603 + }, + { + "epoch": 0.313774036477676, + "grad_norm": 4.150772040536481, + "learning_rate": 2.409944637713724e-06, + "loss": 0.541, + "step": 7604 + }, + { + "epoch": 0.3138153008170339, + "grad_norm": 1.8764180878903076, + "learning_rate": 2.409785254272835e-06, + "loss": 0.5754, + "step": 7605 + }, + { + "epoch": 0.3138565651563918, + "grad_norm": 2.879880802477735, + "learning_rate": 2.409625854580906e-06, + "loss": 0.511, + "step": 7606 + }, + { + "epoch": 0.3138978294957498, + "grad_norm": 5.418883577226338, + "learning_rate": 2.4094664386407834e-06, + "loss": 0.4978, + "step": 7607 + }, + { + "epoch": 0.3139390938351077, + "grad_norm": 3.4349915045035124, + "learning_rate": 2.409307006455315e-06, + "loss": 0.5577, + "step": 7608 + }, + { + "epoch": 0.3139803581744656, + "grad_norm": 4.7043775222246875, + "learning_rate": 2.4091475580273493e-06, + "loss": 0.5457, + "step": 7609 + }, + { + "epoch": 0.31402162251382354, + "grad_norm": 2.6609484653158693, + "learning_rate": 2.4089880933597332e-06, + "loss": 0.5939, + "step": 7610 + }, + { + "epoch": 0.31406288685318146, + "grad_norm": 3.9537564919755757, + "learning_rate": 2.408828612455316e-06, + "loss": 0.5398, + "step": 7611 + }, + { + "epoch": 0.3141041511925394, + "grad_norm": 3.0163141144126446, + "learning_rate": 2.4086691153169464e-06, + "loss": 0.5363, + "step": 7612 + }, + { + "epoch": 0.31414541553189734, + "grad_norm": 4.768919095605631, + "learning_rate": 2.408509601947473e-06, + "loss": 0.4808, + "step": 7613 + }, + { + "epoch": 0.31418667987125526, + "grad_norm": 2.6748162404495814, + "learning_rate": 
2.4083500723497457e-06, + "loss": 0.5655, + "step": 7614 + }, + { + "epoch": 0.3142279442106132, + "grad_norm": 3.217793121099856, + "learning_rate": 2.4081905265266133e-06, + "loss": 0.5183, + "step": 7615 + }, + { + "epoch": 0.3142692085499711, + "grad_norm": 3.0803530979544527, + "learning_rate": 2.408030964480927e-06, + "loss": 0.542, + "step": 7616 + }, + { + "epoch": 0.31431047288932906, + "grad_norm": 5.388112906280268, + "learning_rate": 2.4078713862155348e-06, + "loss": 0.5543, + "step": 7617 + }, + { + "epoch": 0.314351737228687, + "grad_norm": 2.196619372093215, + "learning_rate": 2.407711791733289e-06, + "loss": 0.499, + "step": 7618 + }, + { + "epoch": 0.3143930015680449, + "grad_norm": 4.277219674245368, + "learning_rate": 2.4075521810370404e-06, + "loss": 0.5242, + "step": 7619 + }, + { + "epoch": 0.3144342659074028, + "grad_norm": 2.668735960568214, + "learning_rate": 2.407392554129639e-06, + "loss": 0.5471, + "step": 7620 + }, + { + "epoch": 0.3144755302467607, + "grad_norm": 3.8567135850863177, + "learning_rate": 2.407232911013937e-06, + "loss": 0.5489, + "step": 7621 + }, + { + "epoch": 0.3145167945861187, + "grad_norm": 4.930286020305355, + "learning_rate": 2.4070732516927856e-06, + "loss": 0.5661, + "step": 7622 + }, + { + "epoch": 0.3145580589254766, + "grad_norm": 2.1829365794764466, + "learning_rate": 2.4069135761690363e-06, + "loss": 0.5358, + "step": 7623 + }, + { + "epoch": 0.31459932326483453, + "grad_norm": 3.368917130328067, + "learning_rate": 2.4067538844455423e-06, + "loss": 0.4957, + "step": 7624 + }, + { + "epoch": 0.31464058760419245, + "grad_norm": 2.690599803523067, + "learning_rate": 2.4065941765251554e-06, + "loss": 0.5596, + "step": 7625 + }, + { + "epoch": 0.31468185194355036, + "grad_norm": 5.79064995787374, + "learning_rate": 2.4064344524107284e-06, + "loss": 0.5072, + "step": 7626 + }, + { + "epoch": 0.31472311628290833, + "grad_norm": 2.161476162349107, + "learning_rate": 2.406274712105115e-06, + "loss": 0.5594, + "step": 7627 + }, + { + "epoch": 0.31476438062226625, + "grad_norm": 2.6507946264466993, + "learning_rate": 2.4061149556111676e-06, + "loss": 0.5665, + "step": 7628 + }, + { + "epoch": 0.31480564496162416, + "grad_norm": 3.188300175247391, + "learning_rate": 2.4059551829317407e-06, + "loss": 0.5659, + "step": 7629 + }, + { + "epoch": 0.3148469093009821, + "grad_norm": 4.057855283914741, + "learning_rate": 2.405795394069688e-06, + "loss": 0.5029, + "step": 7630 + }, + { + "epoch": 0.31488817364034, + "grad_norm": 2.438169224542742, + "learning_rate": 2.405635589027863e-06, + "loss": 0.5427, + "step": 7631 + }, + { + "epoch": 0.31492943797969797, + "grad_norm": 2.7059070371968144, + "learning_rate": 2.405475767809121e-06, + "loss": 0.5793, + "step": 7632 + }, + { + "epoch": 0.3149707023190559, + "grad_norm": 5.112544269123498, + "learning_rate": 2.405315930416317e-06, + "loss": 0.5643, + "step": 7633 + }, + { + "epoch": 0.3150119666584138, + "grad_norm": 3.079330513226491, + "learning_rate": 2.4051560768523055e-06, + "loss": 0.5494, + "step": 7634 + }, + { + "epoch": 0.3150532309977717, + "grad_norm": 2.360676572917845, + "learning_rate": 2.404996207119942e-06, + "loss": 0.5166, + "step": 7635 + }, + { + "epoch": 0.31509449533712963, + "grad_norm": 2.695613663993375, + "learning_rate": 2.4048363212220825e-06, + "loss": 0.5684, + "step": 7636 + }, + { + "epoch": 0.3151357596764876, + "grad_norm": 6.4839879034019265, + "learning_rate": 2.4046764191615823e-06, + "loss": 0.5618, + "step": 7637 + }, + { + "epoch": 0.3151770240158455, + 
"grad_norm": 3.633747844129276, + "learning_rate": 2.4045165009412986e-06, + "loss": 0.5611, + "step": 7638 + }, + { + "epoch": 0.31521828835520344, + "grad_norm": 1.7754130096739578, + "learning_rate": 2.404356566564087e-06, + "loss": 0.5333, + "step": 7639 + }, + { + "epoch": 0.31525955269456135, + "grad_norm": 4.821229111790244, + "learning_rate": 2.4041966160328054e-06, + "loss": 0.6044, + "step": 7640 + }, + { + "epoch": 0.31530081703391927, + "grad_norm": 3.9955054089141986, + "learning_rate": 2.4040366493503104e-06, + "loss": 0.4455, + "step": 7641 + }, + { + "epoch": 0.31534208137327724, + "grad_norm": 3.4875492102028254, + "learning_rate": 2.403876666519459e-06, + "loss": 0.5225, + "step": 7642 + }, + { + "epoch": 0.31538334571263515, + "grad_norm": 14.50549536583183, + "learning_rate": 2.4037166675431086e-06, + "loss": 0.5342, + "step": 7643 + }, + { + "epoch": 0.31542461005199307, + "grad_norm": 3.8214362902968695, + "learning_rate": 2.403556652424118e-06, + "loss": 0.5704, + "step": 7644 + }, + { + "epoch": 0.315465874391351, + "grad_norm": 5.70608492288992, + "learning_rate": 2.4033966211653456e-06, + "loss": 0.5468, + "step": 7645 + }, + { + "epoch": 0.3155071387307089, + "grad_norm": 37.74617525835007, + "learning_rate": 2.4032365737696494e-06, + "loss": 0.5874, + "step": 7646 + }, + { + "epoch": 0.3155484030700669, + "grad_norm": 2.161287001873131, + "learning_rate": 2.4030765102398882e-06, + "loss": 0.5589, + "step": 7647 + }, + { + "epoch": 0.3155896674094248, + "grad_norm": 3.8837009447870825, + "learning_rate": 2.4029164305789216e-06, + "loss": 0.55, + "step": 7648 + }, + { + "epoch": 0.3156309317487827, + "grad_norm": 2.8564525606446907, + "learning_rate": 2.402756334789609e-06, + "loss": 0.5165, + "step": 7649 + }, + { + "epoch": 0.3156721960881406, + "grad_norm": 3.790799005314525, + "learning_rate": 2.4025962228748094e-06, + "loss": 0.5644, + "step": 7650 + }, + { + "epoch": 0.31571346042749854, + "grad_norm": 2.417241647303526, + "learning_rate": 2.4024360948373835e-06, + "loss": 0.5562, + "step": 7651 + }, + { + "epoch": 0.3157547247668565, + "grad_norm": 2.491044118787608, + "learning_rate": 2.4022759506801917e-06, + "loss": 0.5678, + "step": 7652 + }, + { + "epoch": 0.3157959891062144, + "grad_norm": 2.483306409421144, + "learning_rate": 2.4021157904060944e-06, + "loss": 0.5279, + "step": 7653 + }, + { + "epoch": 0.31583725344557234, + "grad_norm": 5.987485760633623, + "learning_rate": 2.401955614017952e-06, + "loss": 0.5568, + "step": 7654 + }, + { + "epoch": 0.31587851778493026, + "grad_norm": 3.215730214673241, + "learning_rate": 2.401795421518626e-06, + "loss": 0.5637, + "step": 7655 + }, + { + "epoch": 0.3159197821242882, + "grad_norm": 2.913910992874405, + "learning_rate": 2.401635212910978e-06, + "loss": 0.574, + "step": 7656 + }, + { + "epoch": 0.31596104646364614, + "grad_norm": 6.34789189049969, + "learning_rate": 2.401474988197869e-06, + "loss": 0.5255, + "step": 7657 + }, + { + "epoch": 0.31600231080300406, + "grad_norm": 3.3421060538999163, + "learning_rate": 2.4013147473821622e-06, + "loss": 0.562, + "step": 7658 + }, + { + "epoch": 0.316043575142362, + "grad_norm": 3.509084319417238, + "learning_rate": 2.401154490466719e-06, + "loss": 0.5906, + "step": 7659 + }, + { + "epoch": 0.3160848394817199, + "grad_norm": 5.223462764772498, + "learning_rate": 2.4009942174544023e-06, + "loss": 0.5803, + "step": 7660 + }, + { + "epoch": 0.3161261038210778, + "grad_norm": 3.9924880470105353, + "learning_rate": 2.4008339283480753e-06, + "loss": 0.5334, + "step": 
7661 + }, + { + "epoch": 0.3161673681604357, + "grad_norm": 6.094082751322463, + "learning_rate": 2.4006736231506e-06, + "loss": 0.4963, + "step": 7662 + }, + { + "epoch": 0.3162086324997937, + "grad_norm": 2.656425892424436, + "learning_rate": 2.400513301864841e-06, + "loss": 0.5582, + "step": 7663 + }, + { + "epoch": 0.3162498968391516, + "grad_norm": 4.167838572397496, + "learning_rate": 2.400352964493662e-06, + "loss": 0.5549, + "step": 7664 + }, + { + "epoch": 0.31629116117850953, + "grad_norm": 2.865603807766753, + "learning_rate": 2.4001926110399265e-06, + "loss": 0.5718, + "step": 7665 + }, + { + "epoch": 0.31633242551786744, + "grad_norm": 3.9899000559848226, + "learning_rate": 2.400032241506499e-06, + "loss": 0.5776, + "step": 7666 + }, + { + "epoch": 0.31637368985722536, + "grad_norm": 12.627728137452543, + "learning_rate": 2.399871855896244e-06, + "loss": 0.5845, + "step": 7667 + }, + { + "epoch": 0.31641495419658333, + "grad_norm": 2.7115774654866867, + "learning_rate": 2.399711454212027e-06, + "loss": 0.5673, + "step": 7668 + }, + { + "epoch": 0.31645621853594125, + "grad_norm": 35.551435552613775, + "learning_rate": 2.3995510364567122e-06, + "loss": 0.5208, + "step": 7669 + }, + { + "epoch": 0.31649748287529916, + "grad_norm": 3.6673569959330115, + "learning_rate": 2.3993906026331654e-06, + "loss": 0.4917, + "step": 7670 + }, + { + "epoch": 0.3165387472146571, + "grad_norm": 3.4538864204019992, + "learning_rate": 2.399230152744253e-06, + "loss": 0.5156, + "step": 7671 + }, + { + "epoch": 0.316580011554015, + "grad_norm": 2.780160379222939, + "learning_rate": 2.3990696867928406e-06, + "loss": 0.5479, + "step": 7672 + }, + { + "epoch": 0.31662127589337297, + "grad_norm": 5.9586567884857455, + "learning_rate": 2.3989092047817944e-06, + "loss": 0.5427, + "step": 7673 + }, + { + "epoch": 0.3166625402327309, + "grad_norm": 2.737373018809148, + "learning_rate": 2.398748706713981e-06, + "loss": 0.517, + "step": 7674 + }, + { + "epoch": 0.3167038045720888, + "grad_norm": 3.0674754974159977, + "learning_rate": 2.3985881925922668e-06, + "loss": 0.5206, + "step": 7675 + }, + { + "epoch": 0.3167450689114467, + "grad_norm": 3.520598049341735, + "learning_rate": 2.39842766241952e-06, + "loss": 0.5333, + "step": 7676 + }, + { + "epoch": 0.31678633325080463, + "grad_norm": 4.912276101975348, + "learning_rate": 2.3982671161986077e-06, + "loss": 0.5668, + "step": 7677 + }, + { + "epoch": 0.3168275975901626, + "grad_norm": 4.995627338183538, + "learning_rate": 2.3981065539323977e-06, + "loss": 0.5502, + "step": 7678 + }, + { + "epoch": 0.3168688619295205, + "grad_norm": 4.2986608344724155, + "learning_rate": 2.397945975623758e-06, + "loss": 0.5452, + "step": 7679 + }, + { + "epoch": 0.31691012626887843, + "grad_norm": 5.150371110693693, + "learning_rate": 2.3977853812755563e-06, + "loss": 0.5681, + "step": 7680 + }, + { + "epoch": 0.31695139060823635, + "grad_norm": 2.1588509095909787, + "learning_rate": 2.3976247708906623e-06, + "loss": 0.6178, + "step": 7681 + }, + { + "epoch": 0.31699265494759427, + "grad_norm": 5.227556907503059, + "learning_rate": 2.397464144471944e-06, + "loss": 0.5316, + "step": 7682 + }, + { + "epoch": 0.31703391928695224, + "grad_norm": 5.206162020862715, + "learning_rate": 2.397303502022271e-06, + "loss": 0.6315, + "step": 7683 + }, + { + "epoch": 0.31707518362631015, + "grad_norm": 3.050436467943373, + "learning_rate": 2.3971428435445134e-06, + "loss": 0.5636, + "step": 7684 + }, + { + "epoch": 0.31711644796566807, + "grad_norm": 3.0931584442726163, + 
"learning_rate": 2.3969821690415394e-06, + "loss": 0.5772, + "step": 7685 + }, + { + "epoch": 0.317157712305026, + "grad_norm": 4.646380664011278, + "learning_rate": 2.3968214785162207e-06, + "loss": 0.5596, + "step": 7686 + }, + { + "epoch": 0.3171989766443839, + "grad_norm": 2.5852991630018556, + "learning_rate": 2.3966607719714265e-06, + "loss": 0.5703, + "step": 7687 + }, + { + "epoch": 0.3172402409837419, + "grad_norm": 6.013949268930253, + "learning_rate": 2.396500049410028e-06, + "loss": 0.5053, + "step": 7688 + }, + { + "epoch": 0.3172815053230998, + "grad_norm": 4.054918984048671, + "learning_rate": 2.3963393108348957e-06, + "loss": 0.5206, + "step": 7689 + }, + { + "epoch": 0.3173227696624577, + "grad_norm": 1.98978923306608, + "learning_rate": 2.396178556248901e-06, + "loss": 0.5245, + "step": 7690 + }, + { + "epoch": 0.3173640340018156, + "grad_norm": 2.5429173720226315, + "learning_rate": 2.3960177856549153e-06, + "loss": 0.5283, + "step": 7691 + }, + { + "epoch": 0.31740529834117354, + "grad_norm": 15.756747281205001, + "learning_rate": 2.3958569990558108e-06, + "loss": 0.5253, + "step": 7692 + }, + { + "epoch": 0.3174465626805315, + "grad_norm": 3.4066276580010064, + "learning_rate": 2.395696196454459e-06, + "loss": 0.5597, + "step": 7693 + }, + { + "epoch": 0.3174878270198894, + "grad_norm": 2.789008809525549, + "learning_rate": 2.3955353778537326e-06, + "loss": 0.5461, + "step": 7694 + }, + { + "epoch": 0.31752909135924734, + "grad_norm": 12.13755608125341, + "learning_rate": 2.3953745432565037e-06, + "loss": 0.5346, + "step": 7695 + }, + { + "epoch": 0.31757035569860526, + "grad_norm": 2.7149231920158856, + "learning_rate": 2.395213692665646e-06, + "loss": 0.5231, + "step": 7696 + }, + { + "epoch": 0.31761162003796317, + "grad_norm": 10.469967446625931, + "learning_rate": 2.395052826084032e-06, + "loss": 0.5262, + "step": 7697 + }, + { + "epoch": 0.31765288437732114, + "grad_norm": 3.6189473495370885, + "learning_rate": 2.3948919435145355e-06, + "loss": 0.5551, + "step": 7698 + }, + { + "epoch": 0.31769414871667906, + "grad_norm": 9.855399116497441, + "learning_rate": 2.3947310449600306e-06, + "loss": 0.5324, + "step": 7699 + }, + { + "epoch": 0.317735413056037, + "grad_norm": 3.86750934842021, + "learning_rate": 2.394570130423391e-06, + "loss": 0.6066, + "step": 7700 + }, + { + "epoch": 0.3177766773953949, + "grad_norm": 4.596137591565918, + "learning_rate": 2.39440919990749e-06, + "loss": 0.5443, + "step": 7701 + }, + { + "epoch": 0.3178179417347528, + "grad_norm": 3.591488509612872, + "learning_rate": 2.3942482534152036e-06, + "loss": 0.5298, + "step": 7702 + }, + { + "epoch": 0.3178592060741108, + "grad_norm": 15.895926448087105, + "learning_rate": 2.394087290949407e-06, + "loss": 0.5498, + "step": 7703 + }, + { + "epoch": 0.3179004704134687, + "grad_norm": 2.780434324415639, + "learning_rate": 2.393926312512974e-06, + "loss": 0.5412, + "step": 7704 + }, + { + "epoch": 0.3179417347528266, + "grad_norm": 14.194926150427648, + "learning_rate": 2.393765318108781e-06, + "loss": 0.512, + "step": 7705 + }, + { + "epoch": 0.3179829990921845, + "grad_norm": 2.9670764788031843, + "learning_rate": 2.393604307739704e-06, + "loss": 0.5154, + "step": 7706 + }, + { + "epoch": 0.31802426343154244, + "grad_norm": 7.592182544514257, + "learning_rate": 2.393443281408618e-06, + "loss": 0.5193, + "step": 7707 + }, + { + "epoch": 0.3180655277709004, + "grad_norm": 2.7226649858417007, + "learning_rate": 2.3932822391184e-06, + "loss": 0.5458, + "step": 7708 + }, + { + "epoch": 
0.31810679211025833, + "grad_norm": 3.50907579310767, + "learning_rate": 2.393121180871927e-06, + "loss": 0.5197, + "step": 7709 + }, + { + "epoch": 0.31814805644961625, + "grad_norm": 2.5313103508445627, + "learning_rate": 2.392960106672075e-06, + "loss": 0.5085, + "step": 7710 + }, + { + "epoch": 0.31818932078897416, + "grad_norm": 24.789846521392885, + "learning_rate": 2.3927990165217223e-06, + "loss": 0.4778, + "step": 7711 + }, + { + "epoch": 0.3182305851283321, + "grad_norm": 5.322194647120891, + "learning_rate": 2.392637910423745e-06, + "loss": 0.5659, + "step": 7712 + }, + { + "epoch": 0.31827184946769005, + "grad_norm": 7.446184610023616, + "learning_rate": 2.392476788381022e-06, + "loss": 0.5389, + "step": 7713 + }, + { + "epoch": 0.31831311380704796, + "grad_norm": 41.577876996757844, + "learning_rate": 2.3923156503964307e-06, + "loss": 0.5691, + "step": 7714 + }, + { + "epoch": 0.3183543781464059, + "grad_norm": 2.323191351486838, + "learning_rate": 2.3921544964728496e-06, + "loss": 0.5146, + "step": 7715 + }, + { + "epoch": 0.3183956424857638, + "grad_norm": 4.274644099992167, + "learning_rate": 2.391993326613158e-06, + "loss": 0.5556, + "step": 7716 + }, + { + "epoch": 0.3184369068251217, + "grad_norm": 3.773237683990661, + "learning_rate": 2.3918321408202335e-06, + "loss": 0.5048, + "step": 7717 + }, + { + "epoch": 0.3184781711644797, + "grad_norm": 2.8661234199202474, + "learning_rate": 2.3916709390969557e-06, + "loss": 0.4885, + "step": 7718 + }, + { + "epoch": 0.3185194355038376, + "grad_norm": 5.097459117662689, + "learning_rate": 2.391509721446205e-06, + "loss": 0.5649, + "step": 7719 + }, + { + "epoch": 0.3185606998431955, + "grad_norm": 3.04355938879557, + "learning_rate": 2.39134848787086e-06, + "loss": 0.5663, + "step": 7720 + }, + { + "epoch": 0.31860196418255343, + "grad_norm": 5.166473147457708, + "learning_rate": 2.3911872383738017e-06, + "loss": 0.4965, + "step": 7721 + }, + { + "epoch": 0.31864322852191135, + "grad_norm": 4.5768243734752465, + "learning_rate": 2.3910259729579094e-06, + "loss": 0.5276, + "step": 7722 + }, + { + "epoch": 0.31868449286126926, + "grad_norm": 2.6705249358360326, + "learning_rate": 2.3908646916260647e-06, + "loss": 0.519, + "step": 7723 + }, + { + "epoch": 0.31872575720062724, + "grad_norm": 2.4118394905686005, + "learning_rate": 2.3907033943811475e-06, + "loss": 0.5289, + "step": 7724 + }, + { + "epoch": 0.31876702153998515, + "grad_norm": 5.6690962425944775, + "learning_rate": 2.3905420812260395e-06, + "loss": 0.6044, + "step": 7725 + }, + { + "epoch": 0.31880828587934307, + "grad_norm": 4.510941717819847, + "learning_rate": 2.3903807521636227e-06, + "loss": 0.5721, + "step": 7726 + }, + { + "epoch": 0.318849550218701, + "grad_norm": 2.715087280599441, + "learning_rate": 2.3902194071967774e-06, + "loss": 0.5206, + "step": 7727 + }, + { + "epoch": 0.3188908145580589, + "grad_norm": 2.6787585580599598, + "learning_rate": 2.3900580463283873e-06, + "loss": 0.5401, + "step": 7728 + }, + { + "epoch": 0.31893207889741687, + "grad_norm": 4.200808197407663, + "learning_rate": 2.3898966695613337e-06, + "loss": 0.5216, + "step": 7729 + }, + { + "epoch": 0.3189733432367748, + "grad_norm": 3.062750033553838, + "learning_rate": 2.389735276898499e-06, + "loss": 0.5584, + "step": 7730 + }, + { + "epoch": 0.3190146075761327, + "grad_norm": 4.196066451790405, + "learning_rate": 2.389573868342767e-06, + "loss": 0.5217, + "step": 7731 + }, + { + "epoch": 0.3190558719154906, + "grad_norm": 6.012834335904486, + "learning_rate": 2.38941244389702e-06, + 
"loss": 0.5933, + "step": 7732 + }, + { + "epoch": 0.31909713625484853, + "grad_norm": 2.934266608261887, + "learning_rate": 2.389251003564142e-06, + "loss": 0.5472, + "step": 7733 + }, + { + "epoch": 0.3191384005942065, + "grad_norm": 8.727733394783785, + "learning_rate": 2.3890895473470167e-06, + "loss": 0.5721, + "step": 7734 + }, + { + "epoch": 0.3191796649335644, + "grad_norm": 6.36281732686277, + "learning_rate": 2.3889280752485275e-06, + "loss": 0.5039, + "step": 7735 + }, + { + "epoch": 0.31922092927292234, + "grad_norm": 5.389335129544146, + "learning_rate": 2.388766587271559e-06, + "loss": 0.5637, + "step": 7736 + }, + { + "epoch": 0.31926219361228025, + "grad_norm": 6.932076245695806, + "learning_rate": 2.388605083418996e-06, + "loss": 0.6024, + "step": 7737 + }, + { + "epoch": 0.31930345795163817, + "grad_norm": 2.657725143888827, + "learning_rate": 2.388443563693724e-06, + "loss": 0.5697, + "step": 7738 + }, + { + "epoch": 0.31934472229099614, + "grad_norm": 3.4556984421934023, + "learning_rate": 2.388282028098627e-06, + "loss": 0.573, + "step": 7739 + }, + { + "epoch": 0.31938598663035406, + "grad_norm": 4.020286859872219, + "learning_rate": 2.3881204766365904e-06, + "loss": 0.5409, + "step": 7740 + }, + { + "epoch": 0.319427250969712, + "grad_norm": 7.126748456273476, + "learning_rate": 2.3879589093105004e-06, + "loss": 0.5313, + "step": 7741 + }, + { + "epoch": 0.3194685153090699, + "grad_norm": 1.8423442011280613, + "learning_rate": 2.3877973261232434e-06, + "loss": 0.5118, + "step": 7742 + }, + { + "epoch": 0.3195097796484278, + "grad_norm": 2.34671600644246, + "learning_rate": 2.387635727077705e-06, + "loss": 0.5138, + "step": 7743 + }, + { + "epoch": 0.3195510439877858, + "grad_norm": 2.9272676747284185, + "learning_rate": 2.3874741121767717e-06, + "loss": 0.5131, + "step": 7744 + }, + { + "epoch": 0.3195923083271437, + "grad_norm": 3.754938482648286, + "learning_rate": 2.3873124814233312e-06, + "loss": 0.5048, + "step": 7745 + }, + { + "epoch": 0.3196335726665016, + "grad_norm": 4.360801184139836, + "learning_rate": 2.3871508348202696e-06, + "loss": 0.5322, + "step": 7746 + }, + { + "epoch": 0.3196748370058595, + "grad_norm": 2.3768433134036235, + "learning_rate": 2.386989172370475e-06, + "loss": 0.5325, + "step": 7747 + }, + { + "epoch": 0.31971610134521744, + "grad_norm": 3.848250286002039, + "learning_rate": 2.386827494076835e-06, + "loss": 0.5922, + "step": 7748 + }, + { + "epoch": 0.3197573656845754, + "grad_norm": 5.204386540406073, + "learning_rate": 2.3866657999422374e-06, + "loss": 0.5785, + "step": 7749 + }, + { + "epoch": 0.31979863002393333, + "grad_norm": 4.140536066690652, + "learning_rate": 2.38650408996957e-06, + "loss": 0.6183, + "step": 7750 + }, + { + "epoch": 0.31983989436329124, + "grad_norm": 3.4890072635896563, + "learning_rate": 2.386342364161722e-06, + "loss": 0.5188, + "step": 7751 + }, + { + "epoch": 0.31988115870264916, + "grad_norm": 2.2210059806433162, + "learning_rate": 2.386180622521583e-06, + "loss": 0.4896, + "step": 7752 + }, + { + "epoch": 0.3199224230420071, + "grad_norm": 2.703569669443202, + "learning_rate": 2.3860188650520403e-06, + "loss": 0.5222, + "step": 7753 + }, + { + "epoch": 0.31996368738136505, + "grad_norm": 24.29709312913122, + "learning_rate": 2.3858570917559847e-06, + "loss": 0.5477, + "step": 7754 + }, + { + "epoch": 0.32000495172072296, + "grad_norm": 2.7213999699183278, + "learning_rate": 2.3856953026363047e-06, + "loss": 0.5317, + "step": 7755 + }, + { + "epoch": 0.3200462160600809, + "grad_norm": 
4.993518106483972, + "learning_rate": 2.3855334976958915e-06, + "loss": 0.5223, + "step": 7756 + }, + { + "epoch": 0.3200874803994388, + "grad_norm": 2.4352240476422913, + "learning_rate": 2.3853716769376344e-06, + "loss": 0.576, + "step": 7757 + }, + { + "epoch": 0.3201287447387967, + "grad_norm": 3.2066412786364413, + "learning_rate": 2.3852098403644247e-06, + "loss": 0.5677, + "step": 7758 + }, + { + "epoch": 0.3201700090781547, + "grad_norm": 4.127828748937613, + "learning_rate": 2.3850479879791524e-06, + "loss": 0.5379, + "step": 7759 + }, + { + "epoch": 0.3202112734175126, + "grad_norm": 4.409651998558199, + "learning_rate": 2.3848861197847095e-06, + "loss": 0.6146, + "step": 7760 + }, + { + "epoch": 0.3202525377568705, + "grad_norm": 2.295730086167606, + "learning_rate": 2.3847242357839863e-06, + "loss": 0.5004, + "step": 7761 + }, + { + "epoch": 0.32029380209622843, + "grad_norm": 3.0373246792140334, + "learning_rate": 2.384562335979875e-06, + "loss": 0.5535, + "step": 7762 + }, + { + "epoch": 0.32033506643558635, + "grad_norm": 2.798216101580331, + "learning_rate": 2.384400420375268e-06, + "loss": 0.5684, + "step": 7763 + }, + { + "epoch": 0.3203763307749443, + "grad_norm": 3.2003549129852544, + "learning_rate": 2.3842384889730565e-06, + "loss": 0.5595, + "step": 7764 + }, + { + "epoch": 0.32041759511430223, + "grad_norm": 11.780989465143755, + "learning_rate": 2.3840765417761343e-06, + "loss": 0.5431, + "step": 7765 + }, + { + "epoch": 0.32045885945366015, + "grad_norm": 5.6219865312531665, + "learning_rate": 2.383914578787393e-06, + "loss": 0.5848, + "step": 7766 + }, + { + "epoch": 0.32050012379301807, + "grad_norm": 4.398167831912567, + "learning_rate": 2.383752600009726e-06, + "loss": 0.5645, + "step": 7767 + }, + { + "epoch": 0.320541388132376, + "grad_norm": 3.5492004264223085, + "learning_rate": 2.383590605446027e-06, + "loss": 0.5521, + "step": 7768 + }, + { + "epoch": 0.32058265247173395, + "grad_norm": 6.411559077543447, + "learning_rate": 2.3834285950991892e-06, + "loss": 0.6281, + "step": 7769 + }, + { + "epoch": 0.32062391681109187, + "grad_norm": 3.3608970247918926, + "learning_rate": 2.3832665689721066e-06, + "loss": 0.5718, + "step": 7770 + }, + { + "epoch": 0.3206651811504498, + "grad_norm": 2.894802773216902, + "learning_rate": 2.383104527067674e-06, + "loss": 0.5527, + "step": 7771 + }, + { + "epoch": 0.3207064454898077, + "grad_norm": 2.3895863193902596, + "learning_rate": 2.3829424693887846e-06, + "loss": 0.5138, + "step": 7772 + }, + { + "epoch": 0.3207477098291656, + "grad_norm": 3.4542168285316, + "learning_rate": 2.3827803959383346e-06, + "loss": 0.5338, + "step": 7773 + }, + { + "epoch": 0.3207889741685236, + "grad_norm": 2.6002971201565206, + "learning_rate": 2.382618306719218e-06, + "loss": 0.582, + "step": 7774 + }, + { + "epoch": 0.3208302385078815, + "grad_norm": 2.625727831420935, + "learning_rate": 2.3824562017343304e-06, + "loss": 0.5727, + "step": 7775 + }, + { + "epoch": 0.3208715028472394, + "grad_norm": 3.9690265182629667, + "learning_rate": 2.3822940809865674e-06, + "loss": 0.5748, + "step": 7776 + }, + { + "epoch": 0.32091276718659734, + "grad_norm": 3.590279735567704, + "learning_rate": 2.3821319444788255e-06, + "loss": 0.5126, + "step": 7777 + }, + { + "epoch": 0.32095403152595525, + "grad_norm": 4.177389961294781, + "learning_rate": 2.3819697922140003e-06, + "loss": 0.549, + "step": 7778 + }, + { + "epoch": 0.3209952958653132, + "grad_norm": 1.6520186231715726, + "learning_rate": 2.3818076241949878e-06, + "loss": 0.5006, + "step": 7779 + 
}, + { + "epoch": 0.32103656020467114, + "grad_norm": 3.6335163042551817, + "learning_rate": 2.3816454404246852e-06, + "loss": 0.5815, + "step": 7780 + }, + { + "epoch": 0.32107782454402906, + "grad_norm": 4.97987752677938, + "learning_rate": 2.3814832409059896e-06, + "loss": 0.5747, + "step": 7781 + }, + { + "epoch": 0.32111908888338697, + "grad_norm": 2.2725485170458692, + "learning_rate": 2.381321025641798e-06, + "loss": 0.5159, + "step": 7782 + }, + { + "epoch": 0.3211603532227449, + "grad_norm": 2.9146672169395176, + "learning_rate": 2.3811587946350083e-06, + "loss": 0.5418, + "step": 7783 + }, + { + "epoch": 0.3212016175621028, + "grad_norm": 2.7331730949799815, + "learning_rate": 2.380996547888518e-06, + "loss": 0.5399, + "step": 7784 + }, + { + "epoch": 0.3212428819014608, + "grad_norm": 7.821842327148662, + "learning_rate": 2.3808342854052264e-06, + "loss": 0.5966, + "step": 7785 + }, + { + "epoch": 0.3212841462408187, + "grad_norm": 2.5073101888790603, + "learning_rate": 2.38067200718803e-06, + "loss": 0.5791, + "step": 7786 + }, + { + "epoch": 0.3213254105801766, + "grad_norm": 2.396284523976921, + "learning_rate": 2.380509713239829e-06, + "loss": 0.5258, + "step": 7787 + }, + { + "epoch": 0.3213666749195345, + "grad_norm": 3.1136280756217602, + "learning_rate": 2.3803474035635213e-06, + "loss": 0.5237, + "step": 7788 + }, + { + "epoch": 0.32140793925889244, + "grad_norm": 2.593000683706575, + "learning_rate": 2.380185078162007e-06, + "loss": 0.5366, + "step": 7789 + }, + { + "epoch": 0.3214492035982504, + "grad_norm": 3.840541614218249, + "learning_rate": 2.380022737038185e-06, + "loss": 0.5348, + "step": 7790 + }, + { + "epoch": 0.3214904679376083, + "grad_norm": 5.480529475565101, + "learning_rate": 2.379860380194956e-06, + "loss": 0.5037, + "step": 7791 + }, + { + "epoch": 0.32153173227696624, + "grad_norm": 3.5129581769017344, + "learning_rate": 2.379698007635219e-06, + "loss": 0.5677, + "step": 7792 + }, + { + "epoch": 0.32157299661632416, + "grad_norm": 3.6841345877829372, + "learning_rate": 2.3795356193618753e-06, + "loss": 0.5633, + "step": 7793 + }, + { + "epoch": 0.3216142609556821, + "grad_norm": 6.258511052722845, + "learning_rate": 2.379373215377825e-06, + "loss": 0.588, + "step": 7794 + }, + { + "epoch": 0.32165552529504005, + "grad_norm": 6.186818412355261, + "learning_rate": 2.379210795685969e-06, + "loss": 0.5096, + "step": 7795 + }, + { + "epoch": 0.32169678963439796, + "grad_norm": 4.9279194844003475, + "learning_rate": 2.379048360289209e-06, + "loss": 0.5212, + "step": 7796 + }, + { + "epoch": 0.3217380539737559, + "grad_norm": 2.6325543347599707, + "learning_rate": 2.378885909190446e-06, + "loss": 0.6061, + "step": 7797 + }, + { + "epoch": 0.3217793183131138, + "grad_norm": 4.430109249337435, + "learning_rate": 2.3787234423925823e-06, + "loss": 0.4922, + "step": 7798 + }, + { + "epoch": 0.3218205826524717, + "grad_norm": 3.1392332992531844, + "learning_rate": 2.3785609598985198e-06, + "loss": 0.5083, + "step": 7799 + }, + { + "epoch": 0.3218618469918297, + "grad_norm": 6.960320970728143, + "learning_rate": 2.37839846171116e-06, + "loss": 0.5965, + "step": 7800 + }, + { + "epoch": 0.3219031113311876, + "grad_norm": 4.7677573880473805, + "learning_rate": 2.378235947833407e-06, + "loss": 0.5694, + "step": 7801 + }, + { + "epoch": 0.3219443756705455, + "grad_norm": 8.193208026119608, + "learning_rate": 2.378073418268163e-06, + "loss": 0.5902, + "step": 7802 + }, + { + "epoch": 0.32198564000990343, + "grad_norm": 2.843392703986562, + "learning_rate": 
2.377910873018331e-06, + "loss": 0.6234, + "step": 7803 + }, + { + "epoch": 0.32202690434926134, + "grad_norm": 2.7802135874498006, + "learning_rate": 2.3777483120868146e-06, + "loss": 0.4816, + "step": 7804 + }, + { + "epoch": 0.3220681686886193, + "grad_norm": 3.8320669200772657, + "learning_rate": 2.3775857354765174e-06, + "loss": 0.5056, + "step": 7805 + }, + { + "epoch": 0.32210943302797723, + "grad_norm": 5.0875650251125855, + "learning_rate": 2.3774231431903436e-06, + "loss": 0.5537, + "step": 7806 + }, + { + "epoch": 0.32215069736733515, + "grad_norm": 7.6495980183573815, + "learning_rate": 2.3772605352311974e-06, + "loss": 0.5509, + "step": 7807 + }, + { + "epoch": 0.32219196170669306, + "grad_norm": 2.110146487345104, + "learning_rate": 2.3770979116019836e-06, + "loss": 0.6121, + "step": 7808 + }, + { + "epoch": 0.322233226046051, + "grad_norm": 3.0386891482488076, + "learning_rate": 2.376935272305607e-06, + "loss": 0.5299, + "step": 7809 + }, + { + "epoch": 0.32227449038540895, + "grad_norm": 3.246416170133708, + "learning_rate": 2.3767726173449727e-06, + "loss": 0.5266, + "step": 7810 + }, + { + "epoch": 0.32231575472476687, + "grad_norm": 3.4188550163792923, + "learning_rate": 2.376609946722986e-06, + "loss": 0.5582, + "step": 7811 + }, + { + "epoch": 0.3223570190641248, + "grad_norm": 3.7262537191167837, + "learning_rate": 2.376447260442553e-06, + "loss": 0.5537, + "step": 7812 + }, + { + "epoch": 0.3223982834034827, + "grad_norm": 7.794485155853363, + "learning_rate": 2.3762845585065788e-06, + "loss": 0.5175, + "step": 7813 + }, + { + "epoch": 0.3224395477428406, + "grad_norm": 4.82918030131194, + "learning_rate": 2.3761218409179707e-06, + "loss": 0.5302, + "step": 7814 + }, + { + "epoch": 0.3224808120821986, + "grad_norm": 2.293876899117569, + "learning_rate": 2.3759591076796345e-06, + "loss": 0.5504, + "step": 7815 + }, + { + "epoch": 0.3225220764215565, + "grad_norm": 3.5342678724747674, + "learning_rate": 2.375796358794478e-06, + "loss": 0.5681, + "step": 7816 + }, + { + "epoch": 0.3225633407609144, + "grad_norm": 2.654286091055993, + "learning_rate": 2.3756335942654066e-06, + "loss": 0.5046, + "step": 7817 + }, + { + "epoch": 0.32260460510027233, + "grad_norm": 2.26990402646618, + "learning_rate": 2.3754708140953295e-06, + "loss": 0.6058, + "step": 7818 + }, + { + "epoch": 0.32264586943963025, + "grad_norm": 5.959325065152927, + "learning_rate": 2.375308018287153e-06, + "loss": 0.571, + "step": 7819 + }, + { + "epoch": 0.3226871337789882, + "grad_norm": 2.991877730974843, + "learning_rate": 2.375145206843786e-06, + "loss": 0.5207, + "step": 7820 + }, + { + "epoch": 0.32272839811834614, + "grad_norm": 2.5715237795555903, + "learning_rate": 2.3749823797681368e-06, + "loss": 0.5643, + "step": 7821 + }, + { + "epoch": 0.32276966245770405, + "grad_norm": 3.467084387250564, + "learning_rate": 2.374819537063112e-06, + "loss": 0.5762, + "step": 7822 + }, + { + "epoch": 0.32281092679706197, + "grad_norm": 3.5246782713864517, + "learning_rate": 2.374656678731623e-06, + "loss": 0.5424, + "step": 7823 + }, + { + "epoch": 0.3228521911364199, + "grad_norm": 31.900879415582324, + "learning_rate": 2.374493804776578e-06, + "loss": 0.5191, + "step": 7824 + }, + { + "epoch": 0.32289345547577786, + "grad_norm": 6.225346909057718, + "learning_rate": 2.3743309152008848e-06, + "loss": 0.5823, + "step": 7825 + }, + { + "epoch": 0.3229347198151358, + "grad_norm": 2.6185557719407795, + "learning_rate": 2.3741680100074546e-06, + "loss": 0.6111, + "step": 7826 + }, + { + "epoch": 
0.3229759841544937, + "grad_norm": 6.827106245181326, + "learning_rate": 2.3740050891991974e-06, + "loss": 0.5123, + "step": 7827 + }, + { + "epoch": 0.3230172484938516, + "grad_norm": 9.36027700274212, + "learning_rate": 2.373842152779022e-06, + "loss": 0.5063, + "step": 7828 + }, + { + "epoch": 0.3230585128332095, + "grad_norm": 2.6734994401086367, + "learning_rate": 2.3736792007498408e-06, + "loss": 0.4969, + "step": 7829 + }, + { + "epoch": 0.3230997771725675, + "grad_norm": 2.6089196842000457, + "learning_rate": 2.373516233114563e-06, + "loss": 0.4979, + "step": 7830 + }, + { + "epoch": 0.3231410415119254, + "grad_norm": 2.4269893017723363, + "learning_rate": 2.3733532498761e-06, + "loss": 0.5014, + "step": 7831 + }, + { + "epoch": 0.3231823058512833, + "grad_norm": 4.0337953575635055, + "learning_rate": 2.3731902510373634e-06, + "loss": 0.516, + "step": 7832 + }, + { + "epoch": 0.32322357019064124, + "grad_norm": 2.888258504768625, + "learning_rate": 2.3730272366012643e-06, + "loss": 0.5877, + "step": 7833 + }, + { + "epoch": 0.32326483452999916, + "grad_norm": 2.8216369354651305, + "learning_rate": 2.372864206570715e-06, + "loss": 0.5948, + "step": 7834 + }, + { + "epoch": 0.32330609886935713, + "grad_norm": 3.4997812248317763, + "learning_rate": 2.3727011609486266e-06, + "loss": 0.5734, + "step": 7835 + }, + { + "epoch": 0.32334736320871504, + "grad_norm": 3.217866763638921, + "learning_rate": 2.372538099737913e-06, + "loss": 0.5068, + "step": 7836 + }, + { + "epoch": 0.32338862754807296, + "grad_norm": 3.6409481580566188, + "learning_rate": 2.3723750229414866e-06, + "loss": 0.5458, + "step": 7837 + }, + { + "epoch": 0.3234298918874309, + "grad_norm": 2.938795738387423, + "learning_rate": 2.372211930562259e-06, + "loss": 0.549, + "step": 7838 + }, + { + "epoch": 0.3234711562267888, + "grad_norm": 4.292434589972658, + "learning_rate": 2.372048822603145e-06, + "loss": 0.5582, + "step": 7839 + }, + { + "epoch": 0.32351242056614676, + "grad_norm": 5.746735303640071, + "learning_rate": 2.371885699067057e-06, + "loss": 0.5411, + "step": 7840 + }, + { + "epoch": 0.3235536849055047, + "grad_norm": 2.286615122250781, + "learning_rate": 2.3717225599569095e-06, + "loss": 0.5533, + "step": 7841 + }, + { + "epoch": 0.3235949492448626, + "grad_norm": 9.608312810543854, + "learning_rate": 2.371559405275617e-06, + "loss": 0.5845, + "step": 7842 + }, + { + "epoch": 0.3236362135842205, + "grad_norm": 3.035286333264598, + "learning_rate": 2.371396235026092e-06, + "loss": 0.5376, + "step": 7843 + }, + { + "epoch": 0.3236774779235784, + "grad_norm": 2.81364453175458, + "learning_rate": 2.371233049211251e-06, + "loss": 0.5329, + "step": 7844 + }, + { + "epoch": 0.32371874226293634, + "grad_norm": 2.3888887827459007, + "learning_rate": 2.371069847834008e-06, + "loss": 0.5283, + "step": 7845 + }, + { + "epoch": 0.3237600066022943, + "grad_norm": 3.4072735484521246, + "learning_rate": 2.370906630897279e-06, + "loss": 0.4573, + "step": 7846 + }, + { + "epoch": 0.32380127094165223, + "grad_norm": 2.6822665622157396, + "learning_rate": 2.3707433984039785e-06, + "loss": 0.5143, + "step": 7847 + }, + { + "epoch": 0.32384253528101015, + "grad_norm": 3.0536139213373827, + "learning_rate": 2.3705801503570227e-06, + "loss": 0.535, + "step": 7848 + }, + { + "epoch": 0.32388379962036806, + "grad_norm": 4.939455065336642, + "learning_rate": 2.3704168867593276e-06, + "loss": 0.5544, + "step": 7849 + }, + { + "epoch": 0.323925063959726, + "grad_norm": 6.86191770144818, + "learning_rate": 2.3702536076138096e-06, + 
"loss": 0.5353, + "step": 7850 + }, + { + "epoch": 0.32396632829908395, + "grad_norm": 3.8400298246996676, + "learning_rate": 2.370090312923385e-06, + "loss": 0.5779, + "step": 7851 + }, + { + "epoch": 0.32400759263844187, + "grad_norm": 7.424107090854134, + "learning_rate": 2.369927002690971e-06, + "loss": 0.524, + "step": 7852 + }, + { + "epoch": 0.3240488569777998, + "grad_norm": 2.615783472256837, + "learning_rate": 2.369763676919484e-06, + "loss": 0.4774, + "step": 7853 + }, + { + "epoch": 0.3240901213171577, + "grad_norm": 3.6842182161075048, + "learning_rate": 2.3696003356118417e-06, + "loss": 0.5243, + "step": 7854 + }, + { + "epoch": 0.3241313856565156, + "grad_norm": 4.742405356645486, + "learning_rate": 2.369436978770963e-06, + "loss": 0.525, + "step": 7855 + }, + { + "epoch": 0.3241726499958736, + "grad_norm": 2.995206815144664, + "learning_rate": 2.369273606399764e-06, + "loss": 0.6418, + "step": 7856 + }, + { + "epoch": 0.3242139143352315, + "grad_norm": 3.3835135328281103, + "learning_rate": 2.3691102185011643e-06, + "loss": 0.5387, + "step": 7857 + }, + { + "epoch": 0.3242551786745894, + "grad_norm": 3.7950418986275967, + "learning_rate": 2.3689468150780816e-06, + "loss": 0.5296, + "step": 7858 + }, + { + "epoch": 0.32429644301394733, + "grad_norm": 6.774244633618, + "learning_rate": 2.3687833961334352e-06, + "loss": 0.5315, + "step": 7859 + }, + { + "epoch": 0.32433770735330525, + "grad_norm": 2.139520186159243, + "learning_rate": 2.3686199616701436e-06, + "loss": 0.6377, + "step": 7860 + }, + { + "epoch": 0.3243789716926632, + "grad_norm": 8.333435965128396, + "learning_rate": 2.368456511691127e-06, + "loss": 0.6213, + "step": 7861 + }, + { + "epoch": 0.32442023603202114, + "grad_norm": 12.822042891813831, + "learning_rate": 2.3682930461993045e-06, + "loss": 0.5055, + "step": 7862 + }, + { + "epoch": 0.32446150037137905, + "grad_norm": 4.500514680883882, + "learning_rate": 2.3681295651975957e-06, + "loss": 0.5852, + "step": 7863 + }, + { + "epoch": 0.32450276471073697, + "grad_norm": 4.263429595580679, + "learning_rate": 2.3679660686889217e-06, + "loss": 0.5952, + "step": 7864 + }, + { + "epoch": 0.3245440290500949, + "grad_norm": 3.2307591651894865, + "learning_rate": 2.3678025566762025e-06, + "loss": 0.543, + "step": 7865 + }, + { + "epoch": 0.32458529338945286, + "grad_norm": 2.606581110124594, + "learning_rate": 2.367639029162358e-06, + "loss": 0.5742, + "step": 7866 + }, + { + "epoch": 0.32462655772881077, + "grad_norm": 2.2025916067467537, + "learning_rate": 2.3674754861503102e-06, + "loss": 0.5512, + "step": 7867 + }, + { + "epoch": 0.3246678220681687, + "grad_norm": 3.9851563435698685, + "learning_rate": 2.3673119276429805e-06, + "loss": 0.5597, + "step": 7868 + }, + { + "epoch": 0.3247090864075266, + "grad_norm": 3.058285742223653, + "learning_rate": 2.3671483536432895e-06, + "loss": 0.5601, + "step": 7869 + }, + { + "epoch": 0.3247503507468845, + "grad_norm": 5.183495154982515, + "learning_rate": 2.36698476415416e-06, + "loss": 0.5828, + "step": 7870 + }, + { + "epoch": 0.3247916150862425, + "grad_norm": 2.9110198911000342, + "learning_rate": 2.366821159178514e-06, + "loss": 0.5423, + "step": 7871 + }, + { + "epoch": 0.3248328794256004, + "grad_norm": 5.400196218596581, + "learning_rate": 2.366657538719273e-06, + "loss": 0.5937, + "step": 7872 + }, + { + "epoch": 0.3248741437649583, + "grad_norm": 3.305454022784982, + "learning_rate": 2.3664939027793608e-06, + "loss": 0.5687, + "step": 7873 + }, + { + "epoch": 0.32491540810431624, + "grad_norm": 
6.479924362224196, + "learning_rate": 2.3663302513616994e-06, + "loss": 0.4996, + "step": 7874 + }, + { + "epoch": 0.32495667244367415, + "grad_norm": 3.8727605803944094, + "learning_rate": 2.366166584469213e-06, + "loss": 0.548, + "step": 7875 + }, + { + "epoch": 0.3249979367830321, + "grad_norm": 3.640767329265731, + "learning_rate": 2.366002902104824e-06, + "loss": 0.544, + "step": 7876 + }, + { + "epoch": 0.32503920112239004, + "grad_norm": 2.111137783941466, + "learning_rate": 2.3658392042714573e-06, + "loss": 0.5702, + "step": 7877 + }, + { + "epoch": 0.32508046546174796, + "grad_norm": 1.624491283657052, + "learning_rate": 2.3656754909720363e-06, + "loss": 0.5494, + "step": 7878 + }, + { + "epoch": 0.3251217298011059, + "grad_norm": 5.899440055469875, + "learning_rate": 2.3655117622094854e-06, + "loss": 0.5701, + "step": 7879 + }, + { + "epoch": 0.3251629941404638, + "grad_norm": 3.3613346206268053, + "learning_rate": 2.365348017986729e-06, + "loss": 0.601, + "step": 7880 + }, + { + "epoch": 0.32520425847982176, + "grad_norm": 3.9218282413414904, + "learning_rate": 2.3651842583066925e-06, + "loss": 0.5782, + "step": 7881 + }, + { + "epoch": 0.3252455228191797, + "grad_norm": 4.833423861276231, + "learning_rate": 2.365020483172301e-06, + "loss": 0.5584, + "step": 7882 + }, + { + "epoch": 0.3252867871585376, + "grad_norm": 2.549295529028387, + "learning_rate": 2.3648566925864793e-06, + "loss": 0.602, + "step": 7883 + }, + { + "epoch": 0.3253280514978955, + "grad_norm": 3.086102420590901, + "learning_rate": 2.3646928865521535e-06, + "loss": 0.565, + "step": 7884 + }, + { + "epoch": 0.3253693158372534, + "grad_norm": 3.6714501855483213, + "learning_rate": 2.3645290650722496e-06, + "loss": 0.5278, + "step": 7885 + }, + { + "epoch": 0.3254105801766114, + "grad_norm": 5.497523460209538, + "learning_rate": 2.3643652281496943e-06, + "loss": 0.507, + "step": 7886 + }, + { + "epoch": 0.3254518445159693, + "grad_norm": 21.130583297000687, + "learning_rate": 2.3642013757874134e-06, + "loss": 0.5057, + "step": 7887 + }, + { + "epoch": 0.32549310885532723, + "grad_norm": 2.284745371345098, + "learning_rate": 2.3640375079883345e-06, + "loss": 0.5348, + "step": 7888 + }, + { + "epoch": 0.32553437319468514, + "grad_norm": 3.5802590795682656, + "learning_rate": 2.363873624755384e-06, + "loss": 0.5872, + "step": 7889 + }, + { + "epoch": 0.32557563753404306, + "grad_norm": 2.42901573938568, + "learning_rate": 2.363709726091489e-06, + "loss": 0.527, + "step": 7890 + }, + { + "epoch": 0.32561690187340103, + "grad_norm": 3.487658489755143, + "learning_rate": 2.3635458119995782e-06, + "loss": 0.5273, + "step": 7891 + }, + { + "epoch": 0.32565816621275895, + "grad_norm": 4.418173205101755, + "learning_rate": 2.3633818824825786e-06, + "loss": 0.5796, + "step": 7892 + }, + { + "epoch": 0.32569943055211686, + "grad_norm": 3.4201510082526805, + "learning_rate": 2.3632179375434185e-06, + "loss": 0.4967, + "step": 7893 + }, + { + "epoch": 0.3257406948914748, + "grad_norm": 2.982546393122122, + "learning_rate": 2.363053977185027e-06, + "loss": 0.5792, + "step": 7894 + }, + { + "epoch": 0.3257819592308327, + "grad_norm": 3.674601213665676, + "learning_rate": 2.362890001410332e-06, + "loss": 0.5399, + "step": 7895 + }, + { + "epoch": 0.32582322357019067, + "grad_norm": 6.198210430663579, + "learning_rate": 2.3627260102222635e-06, + "loss": 0.5862, + "step": 7896 + }, + { + "epoch": 0.3258644879095486, + "grad_norm": 3.8797157621976845, + "learning_rate": 2.3625620036237504e-06, + "loss": 0.5201, + "step": 7897 + }, + 
{ + "epoch": 0.3259057522489065, + "grad_norm": 2.243338721634218, + "learning_rate": 2.362397981617722e-06, + "loss": 0.5189, + "step": 7898 + }, + { + "epoch": 0.3259470165882644, + "grad_norm": 3.4181252136382563, + "learning_rate": 2.3622339442071075e-06, + "loss": 0.5552, + "step": 7899 + }, + { + "epoch": 0.32598828092762233, + "grad_norm": 2.521657257254479, + "learning_rate": 2.3620698913948384e-06, + "loss": 0.5545, + "step": 7900 + }, + { + "epoch": 0.3260295452669803, + "grad_norm": 16.710098506024067, + "learning_rate": 2.361905823183844e-06, + "loss": 0.5967, + "step": 7901 + }, + { + "epoch": 0.3260708096063382, + "grad_norm": 32.391426838388064, + "learning_rate": 2.3617417395770557e-06, + "loss": 0.5359, + "step": 7902 + }, + { + "epoch": 0.32611207394569613, + "grad_norm": 3.0400578269047194, + "learning_rate": 2.361577640577405e-06, + "loss": 0.4838, + "step": 7903 + }, + { + "epoch": 0.32615333828505405, + "grad_norm": 2.1925598506467794, + "learning_rate": 2.3614135261878216e-06, + "loss": 0.5485, + "step": 7904 + }, + { + "epoch": 0.32619460262441197, + "grad_norm": 7.690348186941788, + "learning_rate": 2.361249396411237e-06, + "loss": 0.4841, + "step": 7905 + }, + { + "epoch": 0.3262358669637699, + "grad_norm": 2.898690335897508, + "learning_rate": 2.3610852512505846e-06, + "loss": 0.5199, + "step": 7906 + }, + { + "epoch": 0.32627713130312785, + "grad_norm": 58.52863636028613, + "learning_rate": 2.3609210907087952e-06, + "loss": 0.554, + "step": 7907 + }, + { + "epoch": 0.32631839564248577, + "grad_norm": 2.284523771817008, + "learning_rate": 2.3607569147888017e-06, + "loss": 0.4608, + "step": 7908 + }, + { + "epoch": 0.3263596599818437, + "grad_norm": 2.925653941414772, + "learning_rate": 2.360592723493536e-06, + "loss": 0.5792, + "step": 7909 + }, + { + "epoch": 0.3264009243212016, + "grad_norm": 4.05031238936867, + "learning_rate": 2.360428516825932e-06, + "loss": 0.5161, + "step": 7910 + }, + { + "epoch": 0.3264421886605595, + "grad_norm": 4.257089139537273, + "learning_rate": 2.3602642947889214e-06, + "loss": 0.5249, + "step": 7911 + }, + { + "epoch": 0.3264834529999175, + "grad_norm": 3.7172548251772675, + "learning_rate": 2.360100057385439e-06, + "loss": 0.5481, + "step": 7912 + }, + { + "epoch": 0.3265247173392754, + "grad_norm": 8.715929698026653, + "learning_rate": 2.359935804618418e-06, + "loss": 0.525, + "step": 7913 + }, + { + "epoch": 0.3265659816786333, + "grad_norm": 5.150224782337808, + "learning_rate": 2.3597715364907916e-06, + "loss": 0.5677, + "step": 7914 + }, + { + "epoch": 0.32660724601799124, + "grad_norm": 5.992825064981359, + "learning_rate": 2.3596072530054953e-06, + "loss": 0.5372, + "step": 7915 + }, + { + "epoch": 0.32664851035734915, + "grad_norm": 8.464064827746704, + "learning_rate": 2.359442954165463e-06, + "loss": 0.5348, + "step": 7916 + }, + { + "epoch": 0.3266897746967071, + "grad_norm": 3.748949072172828, + "learning_rate": 2.3592786399736298e-06, + "loss": 0.5807, + "step": 7917 + }, + { + "epoch": 0.32673103903606504, + "grad_norm": 3.1397204605084803, + "learning_rate": 2.35911431043293e-06, + "loss": 0.5733, + "step": 7918 + }, + { + "epoch": 0.32677230337542296, + "grad_norm": 3.976440585970796, + "learning_rate": 2.3589499655462997e-06, + "loss": 0.4959, + "step": 7919 + }, + { + "epoch": 0.3268135677147809, + "grad_norm": 2.454403496058517, + "learning_rate": 2.3587856053166742e-06, + "loss": 0.5082, + "step": 7920 + }, + { + "epoch": 0.3268548320541388, + "grad_norm": 5.353576838279565, + "learning_rate": 
2.3586212297469895e-06, + "loss": 0.5739, + "step": 7921 + }, + { + "epoch": 0.32689609639349676, + "grad_norm": 5.816979340590145, + "learning_rate": 2.3584568388401816e-06, + "loss": 0.5707, + "step": 7922 + }, + { + "epoch": 0.3269373607328547, + "grad_norm": 2.9812121525839914, + "learning_rate": 2.3582924325991873e-06, + "loss": 0.6137, + "step": 7923 + }, + { + "epoch": 0.3269786250722126, + "grad_norm": 3.111462657273012, + "learning_rate": 2.358128011026943e-06, + "loss": 0.6267, + "step": 7924 + }, + { + "epoch": 0.3270198894115705, + "grad_norm": 17.321256961961186, + "learning_rate": 2.3579635741263853e-06, + "loss": 0.5125, + "step": 7925 + }, + { + "epoch": 0.3270611537509284, + "grad_norm": 5.767669516214531, + "learning_rate": 2.357799121900452e-06, + "loss": 0.5236, + "step": 7926 + }, + { + "epoch": 0.3271024180902864, + "grad_norm": 5.927528799520128, + "learning_rate": 2.357634654352081e-06, + "loss": 0.5319, + "step": 7927 + }, + { + "epoch": 0.3271436824296443, + "grad_norm": 2.9228304912632934, + "learning_rate": 2.3574701714842087e-06, + "loss": 0.557, + "step": 7928 + }, + { + "epoch": 0.3271849467690022, + "grad_norm": 4.0109068927855684, + "learning_rate": 2.3573056732997747e-06, + "loss": 0.5341, + "step": 7929 + }, + { + "epoch": 0.32722621110836014, + "grad_norm": 4.920063291718701, + "learning_rate": 2.3571411598017164e-06, + "loss": 0.6018, + "step": 7930 + }, + { + "epoch": 0.32726747544771806, + "grad_norm": 5.4825272230754845, + "learning_rate": 2.356976630992973e-06, + "loss": 0.582, + "step": 7931 + }, + { + "epoch": 0.32730873978707603, + "grad_norm": 4.445974446583459, + "learning_rate": 2.3568120868764835e-06, + "loss": 0.501, + "step": 7932 + }, + { + "epoch": 0.32735000412643395, + "grad_norm": 28.64973778729409, + "learning_rate": 2.356647527455186e-06, + "loss": 0.5255, + "step": 7933 + }, + { + "epoch": 0.32739126846579186, + "grad_norm": 6.399200452451752, + "learning_rate": 2.356482952732021e-06, + "loss": 0.5388, + "step": 7934 + }, + { + "epoch": 0.3274325328051498, + "grad_norm": 28.241860407021, + "learning_rate": 2.3563183627099276e-06, + "loss": 0.5557, + "step": 7935 + }, + { + "epoch": 0.3274737971445077, + "grad_norm": 3.092599976214144, + "learning_rate": 2.356153757391846e-06, + "loss": 0.5331, + "step": 7936 + }, + { + "epoch": 0.32751506148386567, + "grad_norm": 3.380277296263371, + "learning_rate": 2.3559891367807174e-06, + "loss": 0.5281, + "step": 7937 + }, + { + "epoch": 0.3275563258232236, + "grad_norm": 6.305818425045415, + "learning_rate": 2.355824500879481e-06, + "loss": 0.5353, + "step": 7938 + }, + { + "epoch": 0.3275975901625815, + "grad_norm": 3.159444745002597, + "learning_rate": 2.3556598496910773e-06, + "loss": 0.5506, + "step": 7939 + }, + { + "epoch": 0.3276388545019394, + "grad_norm": 2.979507290591126, + "learning_rate": 2.355495183218449e-06, + "loss": 0.5489, + "step": 7940 + }, + { + "epoch": 0.32768011884129733, + "grad_norm": 15.974152303441205, + "learning_rate": 2.3553305014645367e-06, + "loss": 0.5422, + "step": 7941 + }, + { + "epoch": 0.3277213831806553, + "grad_norm": 5.025228287022481, + "learning_rate": 2.3551658044322815e-06, + "loss": 0.5615, + "step": 7942 + }, + { + "epoch": 0.3277626475200132, + "grad_norm": 3.802330935436194, + "learning_rate": 2.3550010921246257e-06, + "loss": 0.5072, + "step": 7943 + }, + { + "epoch": 0.32780391185937113, + "grad_norm": 3.4477739212293366, + "learning_rate": 2.3548363645445122e-06, + "loss": 0.5585, + "step": 7944 + }, + { + "epoch": 0.32784517619872905, + 
"grad_norm": 2.6390650319094666, + "learning_rate": 2.354671621694882e-06, + "loss": 0.5351, + "step": 7945 + }, + { + "epoch": 0.32788644053808697, + "grad_norm": 2.328069634302451, + "learning_rate": 2.3545068635786786e-06, + "loss": 0.5456, + "step": 7946 + }, + { + "epoch": 0.32792770487744494, + "grad_norm": 3.7444065170534624, + "learning_rate": 2.3543420901988457e-06, + "loss": 0.5849, + "step": 7947 + }, + { + "epoch": 0.32796896921680285, + "grad_norm": 2.8959990449586, + "learning_rate": 2.354177301558325e-06, + "loss": 0.5351, + "step": 7948 + }, + { + "epoch": 0.32801023355616077, + "grad_norm": 4.6402252208243775, + "learning_rate": 2.3540124976600616e-06, + "loss": 0.5414, + "step": 7949 + }, + { + "epoch": 0.3280514978955187, + "grad_norm": 6.887530963349359, + "learning_rate": 2.3538476785069977e-06, + "loss": 0.469, + "step": 7950 + }, + { + "epoch": 0.3280927622348766, + "grad_norm": 4.733903768943295, + "learning_rate": 2.353682844102079e-06, + "loss": 0.5768, + "step": 7951 + }, + { + "epoch": 0.32813402657423457, + "grad_norm": 3.2632465816335685, + "learning_rate": 2.353517994448249e-06, + "loss": 0.5123, + "step": 7952 + }, + { + "epoch": 0.3281752909135925, + "grad_norm": 3.9846421051100296, + "learning_rate": 2.353353129548452e-06, + "loss": 0.5244, + "step": 7953 + }, + { + "epoch": 0.3282165552529504, + "grad_norm": 4.359696950821509, + "learning_rate": 2.3531882494056333e-06, + "loss": 0.5496, + "step": 7954 + }, + { + "epoch": 0.3282578195923083, + "grad_norm": 2.3457651805071116, + "learning_rate": 2.3530233540227386e-06, + "loss": 0.5542, + "step": 7955 + }, + { + "epoch": 0.32829908393166624, + "grad_norm": 18.336335855253452, + "learning_rate": 2.3528584434027126e-06, + "loss": 0.5982, + "step": 7956 + }, + { + "epoch": 0.3283403482710242, + "grad_norm": 3.383357466879846, + "learning_rate": 2.3526935175485017e-06, + "loss": 0.5907, + "step": 7957 + }, + { + "epoch": 0.3283816126103821, + "grad_norm": 4.071651524990132, + "learning_rate": 2.352528576463051e-06, + "loss": 0.5647, + "step": 7958 + }, + { + "epoch": 0.32842287694974004, + "grad_norm": 2.800214067650718, + "learning_rate": 2.3523636201493076e-06, + "loss": 0.5244, + "step": 7959 + }, + { + "epoch": 0.32846414128909796, + "grad_norm": 4.908988984112443, + "learning_rate": 2.3521986486102172e-06, + "loss": 0.5506, + "step": 7960 + }, + { + "epoch": 0.32850540562845587, + "grad_norm": 11.406835647233029, + "learning_rate": 2.352033661848727e-06, + "loss": 0.5516, + "step": 7961 + }, + { + "epoch": 0.32854666996781384, + "grad_norm": 5.2854432982671105, + "learning_rate": 2.3518686598677846e-06, + "loss": 0.5214, + "step": 7962 + }, + { + "epoch": 0.32858793430717176, + "grad_norm": 5.415775339248621, + "learning_rate": 2.351703642670337e-06, + "loss": 0.5451, + "step": 7963 + }, + { + "epoch": 0.3286291986465297, + "grad_norm": 19.017423779713344, + "learning_rate": 2.3515386102593314e-06, + "loss": 0.5728, + "step": 7964 + }, + { + "epoch": 0.3286704629858876, + "grad_norm": 6.650550756709342, + "learning_rate": 2.351373562637716e-06, + "loss": 0.5362, + "step": 7965 + }, + { + "epoch": 0.3287117273252455, + "grad_norm": 2.9408035475370022, + "learning_rate": 2.351208499808439e-06, + "loss": 0.5691, + "step": 7966 + }, + { + "epoch": 0.3287529916646034, + "grad_norm": 3.0773106504019934, + "learning_rate": 2.3510434217744485e-06, + "loss": 0.55, + "step": 7967 + }, + { + "epoch": 0.3287942560039614, + "grad_norm": 3.8237510887338924, + "learning_rate": 2.350878328538694e-06, + "loss": 0.5169, + 
"step": 7968 + }, + { + "epoch": 0.3288355203433193, + "grad_norm": 2.8932347656982893, + "learning_rate": 2.3507132201041235e-06, + "loss": 0.506, + "step": 7969 + }, + { + "epoch": 0.3288767846826772, + "grad_norm": 3.0082422407402727, + "learning_rate": 2.3505480964736873e-06, + "loss": 0.5392, + "step": 7970 + }, + { + "epoch": 0.32891804902203514, + "grad_norm": 2.5846636299849424, + "learning_rate": 2.350382957650334e-06, + "loss": 0.5137, + "step": 7971 + }, + { + "epoch": 0.32895931336139306, + "grad_norm": 3.2283194166183224, + "learning_rate": 2.3502178036370135e-06, + "loss": 0.5507, + "step": 7972 + }, + { + "epoch": 0.32900057770075103, + "grad_norm": 3.798398145582768, + "learning_rate": 2.3500526344366765e-06, + "loss": 0.6105, + "step": 7973 + }, + { + "epoch": 0.32904184204010894, + "grad_norm": 2.089993288767481, + "learning_rate": 2.3498874500522727e-06, + "loss": 0.5711, + "step": 7974 + }, + { + "epoch": 0.32908310637946686, + "grad_norm": 3.9672472440009945, + "learning_rate": 2.349722250486753e-06, + "loss": 0.5204, + "step": 7975 + }, + { + "epoch": 0.3291243707188248, + "grad_norm": 4.917362732416167, + "learning_rate": 2.3495570357430686e-06, + "loss": 0.5425, + "step": 7976 + }, + { + "epoch": 0.3291656350581827, + "grad_norm": 3.274092091244971, + "learning_rate": 2.34939180582417e-06, + "loss": 0.535, + "step": 7977 + }, + { + "epoch": 0.32920689939754066, + "grad_norm": 3.869203278536237, + "learning_rate": 2.349226560733009e-06, + "loss": 0.5345, + "step": 7978 + }, + { + "epoch": 0.3292481637368986, + "grad_norm": 3.589258407875371, + "learning_rate": 2.3490613004725367e-06, + "loss": 0.5509, + "step": 7979 + }, + { + "epoch": 0.3292894280762565, + "grad_norm": 8.314357309584707, + "learning_rate": 2.3488960250457063e-06, + "loss": 0.5426, + "step": 7980 + }, + { + "epoch": 0.3293306924156144, + "grad_norm": 2.4577479887569127, + "learning_rate": 2.3487307344554686e-06, + "loss": 0.6104, + "step": 7981 + }, + { + "epoch": 0.32937195675497233, + "grad_norm": 6.579217276273421, + "learning_rate": 2.3485654287047774e-06, + "loss": 0.5234, + "step": 7982 + }, + { + "epoch": 0.3294132210943303, + "grad_norm": 2.628258393924072, + "learning_rate": 2.3484001077965844e-06, + "loss": 0.4846, + "step": 7983 + }, + { + "epoch": 0.3294544854336882, + "grad_norm": 3.626115466126548, + "learning_rate": 2.3482347717338434e-06, + "loss": 0.5729, + "step": 7984 + }, + { + "epoch": 0.32949574977304613, + "grad_norm": 2.65218598294585, + "learning_rate": 2.348069420519507e-06, + "loss": 0.5098, + "step": 7985 + }, + { + "epoch": 0.32953701411240405, + "grad_norm": 4.598051001220265, + "learning_rate": 2.3479040541565292e-06, + "loss": 0.5317, + "step": 7986 + }, + { + "epoch": 0.32957827845176196, + "grad_norm": 2.425698864630205, + "learning_rate": 2.3477386726478642e-06, + "loss": 0.525, + "step": 7987 + }, + { + "epoch": 0.32961954279111993, + "grad_norm": 6.711762845143929, + "learning_rate": 2.347573275996466e-06, + "loss": 0.4903, + "step": 7988 + }, + { + "epoch": 0.32966080713047785, + "grad_norm": 14.959962208139466, + "learning_rate": 2.3474078642052885e-06, + "loss": 0.5554, + "step": 7989 + }, + { + "epoch": 0.32970207146983577, + "grad_norm": 3.6488494206185544, + "learning_rate": 2.3472424372772867e-06, + "loss": 0.5037, + "step": 7990 + }, + { + "epoch": 0.3297433358091937, + "grad_norm": 7.496994779258801, + "learning_rate": 2.3470769952154153e-06, + "loss": 0.5835, + "step": 7991 + }, + { + "epoch": 0.3297846001485516, + "grad_norm": 3.4268533435224753, + 
"learning_rate": 2.3469115380226302e-06, + "loss": 0.531, + "step": 7992 + }, + { + "epoch": 0.32982586448790957, + "grad_norm": 3.712394132427824, + "learning_rate": 2.346746065701886e-06, + "loss": 0.5674, + "step": 7993 + }, + { + "epoch": 0.3298671288272675, + "grad_norm": 6.1822597209082915, + "learning_rate": 2.3465805782561383e-06, + "loss": 0.6007, + "step": 7994 + }, + { + "epoch": 0.3299083931666254, + "grad_norm": 18.38994549168855, + "learning_rate": 2.3464150756883445e-06, + "loss": 0.5174, + "step": 7995 + }, + { + "epoch": 0.3299496575059833, + "grad_norm": 4.530616581644741, + "learning_rate": 2.34624955800146e-06, + "loss": 0.5166, + "step": 7996 + }, + { + "epoch": 0.32999092184534123, + "grad_norm": 3.4684438056725377, + "learning_rate": 2.346084025198441e-06, + "loss": 0.5451, + "step": 7997 + }, + { + "epoch": 0.3300321861846992, + "grad_norm": 3.4702638102326095, + "learning_rate": 2.3459184772822448e-06, + "loss": 0.5241, + "step": 7998 + }, + { + "epoch": 0.3300734505240571, + "grad_norm": 2.9171965100132193, + "learning_rate": 2.3457529142558287e-06, + "loss": 0.4725, + "step": 7999 + }, + { + "epoch": 0.33011471486341504, + "grad_norm": 2.6966154564775735, + "learning_rate": 2.345587336122149e-06, + "loss": 0.5665, + "step": 8000 + }, + { + "epoch": 0.33015597920277295, + "grad_norm": 4.357294881946396, + "learning_rate": 2.3454217428841655e-06, + "loss": 0.5548, + "step": 8001 + }, + { + "epoch": 0.33019724354213087, + "grad_norm": 5.506107236224093, + "learning_rate": 2.3452561345448338e-06, + "loss": 0.5202, + "step": 8002 + }, + { + "epoch": 0.33023850788148884, + "grad_norm": 3.9342883179986163, + "learning_rate": 2.3450905111071134e-06, + "loss": 0.5568, + "step": 8003 + }, + { + "epoch": 0.33027977222084676, + "grad_norm": 6.244085724179964, + "learning_rate": 2.344924872573962e-06, + "loss": 0.494, + "step": 8004 + }, + { + "epoch": 0.3303210365602047, + "grad_norm": 3.414383193368572, + "learning_rate": 2.344759218948339e-06, + "loss": 0.4818, + "step": 8005 + }, + { + "epoch": 0.3303623008995626, + "grad_norm": 6.72055872963499, + "learning_rate": 2.344593550233203e-06, + "loss": 0.5434, + "step": 8006 + }, + { + "epoch": 0.3304035652389205, + "grad_norm": 3.0230981576854172, + "learning_rate": 2.3444278664315133e-06, + "loss": 0.534, + "step": 8007 + }, + { + "epoch": 0.3304448295782785, + "grad_norm": 2.8048020840635073, + "learning_rate": 2.3442621675462298e-06, + "loss": 0.5279, + "step": 8008 + }, + { + "epoch": 0.3304860939176364, + "grad_norm": 2.7119837051318436, + "learning_rate": 2.3440964535803115e-06, + "loss": 0.5646, + "step": 8009 + }, + { + "epoch": 0.3305273582569943, + "grad_norm": 6.571873633308957, + "learning_rate": 2.3439307245367192e-06, + "loss": 0.5497, + "step": 8010 + }, + { + "epoch": 0.3305686225963522, + "grad_norm": 2.629520937962739, + "learning_rate": 2.3437649804184126e-06, + "loss": 0.5655, + "step": 8011 + }, + { + "epoch": 0.33060988693571014, + "grad_norm": 2.3563333033222964, + "learning_rate": 2.3435992212283532e-06, + "loss": 0.5474, + "step": 8012 + }, + { + "epoch": 0.3306511512750681, + "grad_norm": 3.304876951552382, + "learning_rate": 2.343433446969501e-06, + "loss": 0.5474, + "step": 8013 + }, + { + "epoch": 0.330692415614426, + "grad_norm": 3.383976349682364, + "learning_rate": 2.3432676576448178e-06, + "loss": 0.5882, + "step": 8014 + }, + { + "epoch": 0.33073367995378394, + "grad_norm": 4.18940747986757, + "learning_rate": 2.3431018532572646e-06, + "loss": 0.5675, + "step": 8015 + }, + { + "epoch": 
0.33077494429314186, + "grad_norm": 8.190557388595677, + "learning_rate": 2.3429360338098027e-06, + "loss": 0.5956, + "step": 8016 + }, + { + "epoch": 0.3308162086324998, + "grad_norm": 4.839231802276091, + "learning_rate": 2.342770199305395e-06, + "loss": 0.5041, + "step": 8017 + }, + { + "epoch": 0.33085747297185775, + "grad_norm": 2.015949811407214, + "learning_rate": 2.3426043497470033e-06, + "loss": 0.5166, + "step": 8018 + }, + { + "epoch": 0.33089873731121566, + "grad_norm": 3.3520018133776213, + "learning_rate": 2.34243848513759e-06, + "loss": 0.5898, + "step": 8019 + }, + { + "epoch": 0.3309400016505736, + "grad_norm": 3.9985180524491257, + "learning_rate": 2.3422726054801174e-06, + "loss": 0.5392, + "step": 8020 + }, + { + "epoch": 0.3309812659899315, + "grad_norm": 2.1625408968314246, + "learning_rate": 2.3421067107775495e-06, + "loss": 0.5569, + "step": 8021 + }, + { + "epoch": 0.3310225303292894, + "grad_norm": 5.298565620134081, + "learning_rate": 2.3419408010328493e-06, + "loss": 0.5352, + "step": 8022 + }, + { + "epoch": 0.3310637946686474, + "grad_norm": 5.954887106501926, + "learning_rate": 2.3417748762489795e-06, + "loss": 0.5895, + "step": 8023 + }, + { + "epoch": 0.3311050590080053, + "grad_norm": 3.0900779572541337, + "learning_rate": 2.3416089364289057e-06, + "loss": 0.5708, + "step": 8024 + }, + { + "epoch": 0.3311463233473632, + "grad_norm": 3.6700931729900272, + "learning_rate": 2.34144298157559e-06, + "loss": 0.5858, + "step": 8025 + }, + { + "epoch": 0.33118758768672113, + "grad_norm": 6.661117504558769, + "learning_rate": 2.3412770116919976e-06, + "loss": 0.5304, + "step": 8026 + }, + { + "epoch": 0.33122885202607905, + "grad_norm": 2.828436563583452, + "learning_rate": 2.3411110267810934e-06, + "loss": 0.5304, + "step": 8027 + }, + { + "epoch": 0.33127011636543696, + "grad_norm": 2.662398231433782, + "learning_rate": 2.340945026845843e-06, + "loss": 0.4992, + "step": 8028 + }, + { + "epoch": 0.33131138070479493, + "grad_norm": 3.4934323211458143, + "learning_rate": 2.3407790118892096e-06, + "loss": 0.6011, + "step": 8029 + }, + { + "epoch": 0.33135264504415285, + "grad_norm": 6.037739813044314, + "learning_rate": 2.3406129819141604e-06, + "loss": 0.5647, + "step": 8030 + }, + { + "epoch": 0.33139390938351077, + "grad_norm": 4.009552551900243, + "learning_rate": 2.34044693692366e-06, + "loss": 0.5303, + "step": 8031 + }, + { + "epoch": 0.3314351737228687, + "grad_norm": 3.956307387481508, + "learning_rate": 2.340280876920675e-06, + "loss": 0.649, + "step": 8032 + }, + { + "epoch": 0.3314764380622266, + "grad_norm": 6.231551148032749, + "learning_rate": 2.3401148019081714e-06, + "loss": 0.5148, + "step": 8033 + }, + { + "epoch": 0.33151770240158457, + "grad_norm": 11.176386817066552, + "learning_rate": 2.3399487118891157e-06, + "loss": 0.5543, + "step": 8034 + }, + { + "epoch": 0.3315589667409425, + "grad_norm": 2.551481410691863, + "learning_rate": 2.3397826068664756e-06, + "loss": 0.4992, + "step": 8035 + }, + { + "epoch": 0.3316002310803004, + "grad_norm": 48.97061164456829, + "learning_rate": 2.339616486843216e-06, + "loss": 0.5525, + "step": 8036 + }, + { + "epoch": 0.3316414954196583, + "grad_norm": 6.991620032717567, + "learning_rate": 2.3394503518223066e-06, + "loss": 0.5846, + "step": 8037 + }, + { + "epoch": 0.33168275975901623, + "grad_norm": 110.45410503702563, + "learning_rate": 2.3392842018067133e-06, + "loss": 0.5771, + "step": 8038 + }, + { + "epoch": 0.3317240240983742, + "grad_norm": 12.8705943450307, + "learning_rate": 2.339118036799405e-06, + 
"loss": 0.5288, + "step": 8039 + }, + { + "epoch": 0.3317652884377321, + "grad_norm": 3.7890589609640535, + "learning_rate": 2.338951856803349e-06, + "loss": 0.5674, + "step": 8040 + }, + { + "epoch": 0.33180655277709004, + "grad_norm": 2.78414354409988, + "learning_rate": 2.3387856618215143e-06, + "loss": 0.5757, + "step": 8041 + }, + { + "epoch": 0.33184781711644795, + "grad_norm": 9.472456528997313, + "learning_rate": 2.33861945185687e-06, + "loss": 0.5695, + "step": 8042 + }, + { + "epoch": 0.33188908145580587, + "grad_norm": 13.923457544254626, + "learning_rate": 2.3384532269123837e-06, + "loss": 0.5757, + "step": 8043 + }, + { + "epoch": 0.33193034579516384, + "grad_norm": 5.298188964569179, + "learning_rate": 2.3382869869910253e-06, + "loss": 0.5419, + "step": 8044 + }, + { + "epoch": 0.33197161013452176, + "grad_norm": 5.102305144084442, + "learning_rate": 2.338120732095764e-06, + "loss": 0.5135, + "step": 8045 + }, + { + "epoch": 0.33201287447387967, + "grad_norm": 3.679455482193057, + "learning_rate": 2.33795446222957e-06, + "loss": 0.5929, + "step": 8046 + }, + { + "epoch": 0.3320541388132376, + "grad_norm": 2.168058905796646, + "learning_rate": 2.3377881773954133e-06, + "loss": 0.4972, + "step": 8047 + }, + { + "epoch": 0.3320954031525955, + "grad_norm": 3.418975499263353, + "learning_rate": 2.337621877596264e-06, + "loss": 0.5579, + "step": 8048 + }, + { + "epoch": 0.3321366674919535, + "grad_norm": 2.298212811881013, + "learning_rate": 2.3374555628350923e-06, + "loss": 0.5536, + "step": 8049 + }, + { + "epoch": 0.3321779318313114, + "grad_norm": 6.022059649582587, + "learning_rate": 2.337289233114869e-06, + "loss": 0.5552, + "step": 8050 + }, + { + "epoch": 0.3322191961706693, + "grad_norm": 3.7747594965048665, + "learning_rate": 2.3371228884385658e-06, + "loss": 0.5478, + "step": 8051 + }, + { + "epoch": 0.3322604605100272, + "grad_norm": 5.5629075779773505, + "learning_rate": 2.3369565288091537e-06, + "loss": 0.5439, + "step": 8052 + }, + { + "epoch": 0.33230172484938514, + "grad_norm": 2.466824498470237, + "learning_rate": 2.3367901542296042e-06, + "loss": 0.5429, + "step": 8053 + }, + { + "epoch": 0.3323429891887431, + "grad_norm": 4.314639300128079, + "learning_rate": 2.336623764702889e-06, + "loss": 0.5778, + "step": 8054 + }, + { + "epoch": 0.332384253528101, + "grad_norm": 5.782239425351576, + "learning_rate": 2.3364573602319806e-06, + "loss": 0.5159, + "step": 8055 + }, + { + "epoch": 0.33242551786745894, + "grad_norm": 4.211192079862366, + "learning_rate": 2.3362909408198514e-06, + "loss": 0.5212, + "step": 8056 + }, + { + "epoch": 0.33246678220681686, + "grad_norm": 2.706252860712733, + "learning_rate": 2.3361245064694736e-06, + "loss": 0.5242, + "step": 8057 + }, + { + "epoch": 0.3325080465461748, + "grad_norm": 14.134667071200557, + "learning_rate": 2.3359580571838205e-06, + "loss": 0.5893, + "step": 8058 + }, + { + "epoch": 0.33254931088553275, + "grad_norm": 3.653130999888262, + "learning_rate": 2.3357915929658655e-06, + "loss": 0.5798, + "step": 8059 + }, + { + "epoch": 0.33259057522489066, + "grad_norm": 2.021193035450295, + "learning_rate": 2.335625113818581e-06, + "loss": 0.5716, + "step": 8060 + }, + { + "epoch": 0.3326318395642486, + "grad_norm": 2.739626375443339, + "learning_rate": 2.335458619744943e-06, + "loss": 0.5412, + "step": 8061 + }, + { + "epoch": 0.3326731039036065, + "grad_norm": 2.9521146724271836, + "learning_rate": 2.335292110747923e-06, + "loss": 0.4941, + "step": 8062 + }, + { + "epoch": 0.3327143682429644, + "grad_norm": 
6.6663164942211806, + "learning_rate": 2.335125586830497e-06, + "loss": 0.533, + "step": 8063 + }, + { + "epoch": 0.3327556325823224, + "grad_norm": 14.314635901877862, + "learning_rate": 2.334959047995638e-06, + "loss": 0.5633, + "step": 8064 + }, + { + "epoch": 0.3327968969216803, + "grad_norm": 5.942522297768379, + "learning_rate": 2.334792494246322e-06, + "loss": 0.5569, + "step": 8065 + }, + { + "epoch": 0.3328381612610382, + "grad_norm": 5.138076448631908, + "learning_rate": 2.3346259255855238e-06, + "loss": 0.5497, + "step": 8066 + }, + { + "epoch": 0.33287942560039613, + "grad_norm": 3.0322747357632314, + "learning_rate": 2.334459342016219e-06, + "loss": 0.5394, + "step": 8067 + }, + { + "epoch": 0.33292068993975404, + "grad_norm": 20.443669466909807, + "learning_rate": 2.3342927435413827e-06, + "loss": 0.5296, + "step": 8068 + }, + { + "epoch": 0.332961954279112, + "grad_norm": 3.4900378020477665, + "learning_rate": 2.3341261301639906e-06, + "loss": 0.554, + "step": 8069 + }, + { + "epoch": 0.33300321861846993, + "grad_norm": 2.3317079046599525, + "learning_rate": 2.3339595018870192e-06, + "loss": 0.5205, + "step": 8070 + }, + { + "epoch": 0.33304448295782785, + "grad_norm": 5.571547443175751, + "learning_rate": 2.333792858713445e-06, + "loss": 0.5567, + "step": 8071 + }, + { + "epoch": 0.33308574729718576, + "grad_norm": 5.2701768652332674, + "learning_rate": 2.3336262006462444e-06, + "loss": 0.5576, + "step": 8072 + }, + { + "epoch": 0.3331270116365437, + "grad_norm": 3.5801510416045055, + "learning_rate": 2.3334595276883947e-06, + "loss": 0.547, + "step": 8073 + }, + { + "epoch": 0.33316827597590165, + "grad_norm": 2.0452134135603437, + "learning_rate": 2.333292839842873e-06, + "loss": 0.4489, + "step": 8074 + }, + { + "epoch": 0.33320954031525957, + "grad_norm": 6.033807557377229, + "learning_rate": 2.333126137112656e-06, + "loss": 0.5644, + "step": 8075 + }, + { + "epoch": 0.3332508046546175, + "grad_norm": 18.569878605543675, + "learning_rate": 2.332959419500722e-06, + "loss": 0.5379, + "step": 8076 + }, + { + "epoch": 0.3332920689939754, + "grad_norm": 3.00800181208064, + "learning_rate": 2.3327926870100494e-06, + "loss": 0.4819, + "step": 8077 + }, + { + "epoch": 0.3333333333333333, + "grad_norm": 3.244311549320708, + "learning_rate": 2.332625939643616e-06, + "loss": 0.5474, + "step": 8078 + }, + { + "epoch": 0.3333745976726913, + "grad_norm": 10.82558003437984, + "learning_rate": 2.3324591774044004e-06, + "loss": 0.5316, + "step": 8079 + }, + { + "epoch": 0.3334158620120492, + "grad_norm": 4.153975298231833, + "learning_rate": 2.332292400295381e-06, + "loss": 0.5266, + "step": 8080 + }, + { + "epoch": 0.3334571263514071, + "grad_norm": 4.609146931995572, + "learning_rate": 2.3321256083195383e-06, + "loss": 0.5537, + "step": 8081 + }, + { + "epoch": 0.33349839069076503, + "grad_norm": 3.2107159290779705, + "learning_rate": 2.3319588014798495e-06, + "loss": 0.5109, + "step": 8082 + }, + { + "epoch": 0.33353965503012295, + "grad_norm": 3.406707141150943, + "learning_rate": 2.3317919797792962e-06, + "loss": 0.5106, + "step": 8083 + }, + { + "epoch": 0.3335809193694809, + "grad_norm": 3.7530679231973445, + "learning_rate": 2.3316251432208563e-06, + "loss": 0.5188, + "step": 8084 + }, + { + "epoch": 0.33362218370883884, + "grad_norm": 2.3449758480921394, + "learning_rate": 2.3314582918075113e-06, + "loss": 0.5429, + "step": 8085 + }, + { + "epoch": 0.33366344804819675, + "grad_norm": 4.631996187050229, + "learning_rate": 2.3312914255422414e-06, + "loss": 0.5115, + "step": 8086 
+ }, + { + "epoch": 0.33370471238755467, + "grad_norm": 6.499533033672716, + "learning_rate": 2.331124544428027e-06, + "loss": 0.5213, + "step": 8087 + }, + { + "epoch": 0.3337459767269126, + "grad_norm": 2.190113276284981, + "learning_rate": 2.3309576484678492e-06, + "loss": 0.5478, + "step": 8088 + }, + { + "epoch": 0.3337872410662705, + "grad_norm": 6.3427388230409365, + "learning_rate": 2.330790737664689e-06, + "loss": 0.5264, + "step": 8089 + }, + { + "epoch": 0.3338285054056285, + "grad_norm": 4.027565414166471, + "learning_rate": 2.330623812021528e-06, + "loss": 0.5573, + "step": 8090 + }, + { + "epoch": 0.3338697697449864, + "grad_norm": 4.722681132147144, + "learning_rate": 2.330456871541348e-06, + "loss": 0.5656, + "step": 8091 + }, + { + "epoch": 0.3339110340843443, + "grad_norm": 2.130847383798799, + "learning_rate": 2.33028991622713e-06, + "loss": 0.5322, + "step": 8092 + }, + { + "epoch": 0.3339522984237022, + "grad_norm": 4.896914468331676, + "learning_rate": 2.3301229460818576e-06, + "loss": 0.4626, + "step": 8093 + }, + { + "epoch": 0.33399356276306014, + "grad_norm": 13.699464417215847, + "learning_rate": 2.329955961108513e-06, + "loss": 0.6118, + "step": 8094 + }, + { + "epoch": 0.3340348271024181, + "grad_norm": 2.6546368730900562, + "learning_rate": 2.3297889613100782e-06, + "loss": 0.5193, + "step": 8095 + }, + { + "epoch": 0.334076091441776, + "grad_norm": 13.841528032714823, + "learning_rate": 2.329621946689537e-06, + "loss": 0.5398, + "step": 8096 + }, + { + "epoch": 0.33411735578113394, + "grad_norm": 6.503644115639246, + "learning_rate": 2.329454917249872e-06, + "loss": 0.5076, + "step": 8097 + }, + { + "epoch": 0.33415862012049186, + "grad_norm": 3.7190345378277057, + "learning_rate": 2.329287872994067e-06, + "loss": 0.515, + "step": 8098 + }, + { + "epoch": 0.33419988445984977, + "grad_norm": 2.634876807341596, + "learning_rate": 2.329120813925107e-06, + "loss": 0.5763, + "step": 8099 + }, + { + "epoch": 0.33424114879920774, + "grad_norm": 3.5095617754789155, + "learning_rate": 2.328953740045975e-06, + "loss": 0.5255, + "step": 8100 + }, + { + "epoch": 0.33428241313856566, + "grad_norm": 3.2202105006675086, + "learning_rate": 2.328786651359655e-06, + "loss": 0.5621, + "step": 8101 + }, + { + "epoch": 0.3343236774779236, + "grad_norm": 2.570730929760501, + "learning_rate": 2.328619547869132e-06, + "loss": 0.5312, + "step": 8102 + }, + { + "epoch": 0.3343649418172815, + "grad_norm": 5.701198821637458, + "learning_rate": 2.3284524295773914e-06, + "loss": 0.5219, + "step": 8103 + }, + { + "epoch": 0.3344062061566394, + "grad_norm": 2.5368866541559605, + "learning_rate": 2.3282852964874177e-06, + "loss": 0.501, + "step": 8104 + }, + { + "epoch": 0.3344474704959974, + "grad_norm": 9.441568954829814, + "learning_rate": 2.3281181486021964e-06, + "loss": 0.5424, + "step": 8105 + }, + { + "epoch": 0.3344887348353553, + "grad_norm": 3.1533866878085544, + "learning_rate": 2.327950985924714e-06, + "loss": 0.5328, + "step": 8106 + }, + { + "epoch": 0.3345299991747132, + "grad_norm": 3.0419382541700557, + "learning_rate": 2.327783808457955e-06, + "loss": 0.5817, + "step": 8107 + }, + { + "epoch": 0.3345712635140711, + "grad_norm": 2.366272273643385, + "learning_rate": 2.3276166162049067e-06, + "loss": 0.4825, + "step": 8108 + }, + { + "epoch": 0.33461252785342904, + "grad_norm": 5.524634380015645, + "learning_rate": 2.3274494091685552e-06, + "loss": 0.565, + "step": 8109 + }, + { + "epoch": 0.334653792192787, + "grad_norm": 2.9855808839170543, + "learning_rate": 
2.3272821873518876e-06, + "loss": 0.4422, + "step": 8110 + }, + { + "epoch": 0.33469505653214493, + "grad_norm": 3.447638450028278, + "learning_rate": 2.3271149507578903e-06, + "loss": 0.5464, + "step": 8111 + }, + { + "epoch": 0.33473632087150285, + "grad_norm": 3.83285643826788, + "learning_rate": 2.326947699389551e-06, + "loss": 0.5107, + "step": 8112 + }, + { + "epoch": 0.33477758521086076, + "grad_norm": 74.52646964360947, + "learning_rate": 2.326780433249857e-06, + "loss": 0.5634, + "step": 8113 + }, + { + "epoch": 0.3348188495502187, + "grad_norm": 4.626376759968479, + "learning_rate": 2.3266131523417957e-06, + "loss": 0.5444, + "step": 8114 + }, + { + "epoch": 0.33486011388957665, + "grad_norm": 2.283659945101633, + "learning_rate": 2.326445856668356e-06, + "loss": 0.5326, + "step": 8115 + }, + { + "epoch": 0.33490137822893457, + "grad_norm": 3.536041940963327, + "learning_rate": 2.3262785462325266e-06, + "loss": 0.5089, + "step": 8116 + }, + { + "epoch": 0.3349426425682925, + "grad_norm": 4.44291632528756, + "learning_rate": 2.3261112210372944e-06, + "loss": 0.5062, + "step": 8117 + }, + { + "epoch": 0.3349839069076504, + "grad_norm": 6.719504205573048, + "learning_rate": 2.3259438810856493e-06, + "loss": 0.5645, + "step": 8118 + }, + { + "epoch": 0.3350251712470083, + "grad_norm": 3.092972764141458, + "learning_rate": 2.3257765263805804e-06, + "loss": 0.5019, + "step": 8119 + }, + { + "epoch": 0.3350664355863663, + "grad_norm": 3.0215969291815514, + "learning_rate": 2.3256091569250773e-06, + "loss": 0.5759, + "step": 8120 + }, + { + "epoch": 0.3351076999257242, + "grad_norm": 3.5069178610290694, + "learning_rate": 2.325441772722129e-06, + "loss": 0.551, + "step": 8121 + }, + { + "epoch": 0.3351489642650821, + "grad_norm": 4.821681648111683, + "learning_rate": 2.3252743737747257e-06, + "loss": 0.5307, + "step": 8122 + }, + { + "epoch": 0.33519022860444003, + "grad_norm": 1.9393458239876264, + "learning_rate": 2.3251069600858576e-06, + "loss": 0.5643, + "step": 8123 + }, + { + "epoch": 0.33523149294379795, + "grad_norm": 2.4830799998324973, + "learning_rate": 2.3249395316585147e-06, + "loss": 0.5253, + "step": 8124 + }, + { + "epoch": 0.3352727572831559, + "grad_norm": 2.916257923541919, + "learning_rate": 2.3247720884956885e-06, + "loss": 0.5605, + "step": 8125 + }, + { + "epoch": 0.33531402162251384, + "grad_norm": 4.139536010564511, + "learning_rate": 2.3246046306003696e-06, + "loss": 0.5567, + "step": 8126 + }, + { + "epoch": 0.33535528596187175, + "grad_norm": 6.562933118761652, + "learning_rate": 2.324437157975549e-06, + "loss": 0.5633, + "step": 8127 + }, + { + "epoch": 0.33539655030122967, + "grad_norm": 7.403383591904467, + "learning_rate": 2.324269670624218e-06, + "loss": 0.5525, + "step": 8128 + }, + { + "epoch": 0.3354378146405876, + "grad_norm": 4.729709146494971, + "learning_rate": 2.324102168549369e-06, + "loss": 0.5025, + "step": 8129 + }, + { + "epoch": 0.33547907897994556, + "grad_norm": 2.8051144400142394, + "learning_rate": 2.323934651753994e-06, + "loss": 0.528, + "step": 8130 + }, + { + "epoch": 0.33552034331930347, + "grad_norm": 4.707269386390914, + "learning_rate": 2.323767120241085e-06, + "loss": 0.5564, + "step": 8131 + }, + { + "epoch": 0.3355616076586614, + "grad_norm": 2.749831021078922, + "learning_rate": 2.323599574013634e-06, + "loss": 0.5163, + "step": 8132 + }, + { + "epoch": 0.3356028719980193, + "grad_norm": 6.148666703597966, + "learning_rate": 2.3234320130746347e-06, + "loss": 0.5193, + "step": 8133 + }, + { + "epoch": 0.3356441363373772, + 
"grad_norm": 10.071712837939003, + "learning_rate": 2.323264437427079e-06, + "loss": 0.5553, + "step": 8134 + }, + { + "epoch": 0.3356854006767352, + "grad_norm": 4.51175378275745, + "learning_rate": 2.3230968470739616e-06, + "loss": 0.5485, + "step": 8135 + }, + { + "epoch": 0.3357266650160931, + "grad_norm": 3.6016857348006153, + "learning_rate": 2.322929242018275e-06, + "loss": 0.5868, + "step": 8136 + }, + { + "epoch": 0.335767929355451, + "grad_norm": 8.865527725420918, + "learning_rate": 2.322761622263014e-06, + "loss": 0.5449, + "step": 8137 + }, + { + "epoch": 0.33580919369480894, + "grad_norm": 3.2356715340772166, + "learning_rate": 2.3225939878111716e-06, + "loss": 0.5593, + "step": 8138 + }, + { + "epoch": 0.33585045803416685, + "grad_norm": 6.544332311011258, + "learning_rate": 2.3224263386657434e-06, + "loss": 0.5154, + "step": 8139 + }, + { + "epoch": 0.3358917223735248, + "grad_norm": 11.265669217098868, + "learning_rate": 2.322258674829723e-06, + "loss": 0.5566, + "step": 8140 + }, + { + "epoch": 0.33593298671288274, + "grad_norm": 2.723253512826104, + "learning_rate": 2.3220909963061056e-06, + "loss": 0.5698, + "step": 8141 + }, + { + "epoch": 0.33597425105224066, + "grad_norm": 2.6139201863127726, + "learning_rate": 2.3219233030978866e-06, + "loss": 0.4744, + "step": 8142 + }, + { + "epoch": 0.3360155153915986, + "grad_norm": 5.047094139843417, + "learning_rate": 2.3217555952080613e-06, + "loss": 0.5577, + "step": 8143 + }, + { + "epoch": 0.3360567797309565, + "grad_norm": 3.342065190497795, + "learning_rate": 2.321587872639625e-06, + "loss": 0.5403, + "step": 8144 + }, + { + "epoch": 0.33609804407031446, + "grad_norm": 3.022363187463244, + "learning_rate": 2.3214201353955743e-06, + "loss": 0.5385, + "step": 8145 + }, + { + "epoch": 0.3361393084096724, + "grad_norm": 2.732581053477713, + "learning_rate": 2.321252383478906e-06, + "loss": 0.5552, + "step": 8146 + }, + { + "epoch": 0.3361805727490303, + "grad_norm": 5.569468420590877, + "learning_rate": 2.321084616892614e-06, + "loss": 0.5513, + "step": 8147 + }, + { + "epoch": 0.3362218370883882, + "grad_norm": 3.6216749899825236, + "learning_rate": 2.3209168356396978e-06, + "loss": 0.5277, + "step": 8148 + }, + { + "epoch": 0.3362631014277461, + "grad_norm": 3.157247505679042, + "learning_rate": 2.3207490397231533e-06, + "loss": 0.5119, + "step": 8149 + }, + { + "epoch": 0.33630436576710404, + "grad_norm": 11.401158656403918, + "learning_rate": 2.320581229145977e-06, + "loss": 0.4996, + "step": 8150 + }, + { + "epoch": 0.336345630106462, + "grad_norm": 4.882141560462928, + "learning_rate": 2.320413403911168e-06, + "loss": 0.5206, + "step": 8151 + }, + { + "epoch": 0.33638689444581993, + "grad_norm": 7.787684595556056, + "learning_rate": 2.3202455640217225e-06, + "loss": 0.5712, + "step": 8152 + }, + { + "epoch": 0.33642815878517784, + "grad_norm": 3.7537246457881412, + "learning_rate": 2.32007770948064e-06, + "loss": 0.5644, + "step": 8153 + }, + { + "epoch": 0.33646942312453576, + "grad_norm": 2.4002046503847527, + "learning_rate": 2.319909840290918e-06, + "loss": 0.5674, + "step": 8154 + }, + { + "epoch": 0.3365106874638937, + "grad_norm": 3.159624585712956, + "learning_rate": 2.319741956455555e-06, + "loss": 0.5078, + "step": 8155 + }, + { + "epoch": 0.33655195180325165, + "grad_norm": 4.008986760869622, + "learning_rate": 2.3195740579775493e-06, + "loss": 0.585, + "step": 8156 + }, + { + "epoch": 0.33659321614260956, + "grad_norm": 2.6944008717869865, + "learning_rate": 2.3194061448599015e-06, + "loss": 0.4757, + 
"step": 8157 + }, + { + "epoch": 0.3366344804819675, + "grad_norm": 3.7685303074841188, + "learning_rate": 2.31923821710561e-06, + "loss": 0.5636, + "step": 8158 + }, + { + "epoch": 0.3366757448213254, + "grad_norm": 2.61481354476918, + "learning_rate": 2.319070274717674e-06, + "loss": 0.5157, + "step": 8159 + }, + { + "epoch": 0.3367170091606833, + "grad_norm": 2.2120816262771075, + "learning_rate": 2.3189023176990946e-06, + "loss": 0.4818, + "step": 8160 + }, + { + "epoch": 0.3367582735000413, + "grad_norm": 27.198950019712278, + "learning_rate": 2.3187343460528707e-06, + "loss": 0.5295, + "step": 8161 + }, + { + "epoch": 0.3367995378393992, + "grad_norm": 7.919311373541278, + "learning_rate": 2.3185663597820034e-06, + "loss": 0.5441, + "step": 8162 + }, + { + "epoch": 0.3368408021787571, + "grad_norm": 3.0048163287229848, + "learning_rate": 2.3183983588894933e-06, + "loss": 0.5349, + "step": 8163 + }, + { + "epoch": 0.33688206651811503, + "grad_norm": 5.442498723855179, + "learning_rate": 2.318230343378341e-06, + "loss": 0.603, + "step": 8164 + }, + { + "epoch": 0.33692333085747295, + "grad_norm": 3.149113028020985, + "learning_rate": 2.318062313251548e-06, + "loss": 0.5746, + "step": 8165 + }, + { + "epoch": 0.3369645951968309, + "grad_norm": 3.0224110688470986, + "learning_rate": 2.3178942685121155e-06, + "loss": 0.4866, + "step": 8166 + }, + { + "epoch": 0.33700585953618883, + "grad_norm": 15.363379932410412, + "learning_rate": 2.3177262091630457e-06, + "loss": 0.5255, + "step": 8167 + }, + { + "epoch": 0.33704712387554675, + "grad_norm": 2.172288610797106, + "learning_rate": 2.3175581352073396e-06, + "loss": 0.4899, + "step": 8168 + }, + { + "epoch": 0.33708838821490467, + "grad_norm": 3.5807613307731465, + "learning_rate": 2.3173900466480003e-06, + "loss": 0.5063, + "step": 8169 + }, + { + "epoch": 0.3371296525542626, + "grad_norm": 4.387749232090683, + "learning_rate": 2.31722194348803e-06, + "loss": 0.5296, + "step": 8170 + }, + { + "epoch": 0.33717091689362055, + "grad_norm": 4.573742235828096, + "learning_rate": 2.317053825730431e-06, + "loss": 0.5534, + "step": 8171 + }, + { + "epoch": 0.33721218123297847, + "grad_norm": 3.615383840363885, + "learning_rate": 2.3168856933782073e-06, + "loss": 0.5323, + "step": 8172 + }, + { + "epoch": 0.3372534455723364, + "grad_norm": 3.2085461859959543, + "learning_rate": 2.3167175464343616e-06, + "loss": 0.5657, + "step": 8173 + }, + { + "epoch": 0.3372947099116943, + "grad_norm": 3.508319047350895, + "learning_rate": 2.3165493849018967e-06, + "loss": 0.5943, + "step": 8174 + }, + { + "epoch": 0.3373359742510522, + "grad_norm": 6.42257992286478, + "learning_rate": 2.3163812087838176e-06, + "loss": 0.5172, + "step": 8175 + }, + { + "epoch": 0.3373772385904102, + "grad_norm": 2.4960458853384955, + "learning_rate": 2.3162130180831278e-06, + "loss": 0.5201, + "step": 8176 + }, + { + "epoch": 0.3374185029297681, + "grad_norm": 7.9382515875643005, + "learning_rate": 2.3160448128028312e-06, + "loss": 0.5707, + "step": 8177 + }, + { + "epoch": 0.337459767269126, + "grad_norm": 2.7041445812679314, + "learning_rate": 2.3158765929459333e-06, + "loss": 0.6104, + "step": 8178 + }, + { + "epoch": 0.33750103160848394, + "grad_norm": 4.147516695316713, + "learning_rate": 2.3157083585154388e-06, + "loss": 0.6048, + "step": 8179 + }, + { + "epoch": 0.33754229594784185, + "grad_norm": 2.562933142855936, + "learning_rate": 2.315540109514352e-06, + "loss": 0.5297, + "step": 8180 + }, + { + "epoch": 0.3375835602871998, + "grad_norm": 3.684153527609689, + 
"learning_rate": 2.315371845945678e-06, + "loss": 0.5543, + "step": 8181 + }, + { + "epoch": 0.33762482462655774, + "grad_norm": 5.185203381414148, + "learning_rate": 2.3152035678124242e-06, + "loss": 0.5696, + "step": 8182 + }, + { + "epoch": 0.33766608896591566, + "grad_norm": 3.8893305921792654, + "learning_rate": 2.3150352751175945e-06, + "loss": 0.4989, + "step": 8183 + }, + { + "epoch": 0.33770735330527357, + "grad_norm": 3.3559429643398366, + "learning_rate": 2.314866967864196e-06, + "loss": 0.6116, + "step": 8184 + }, + { + "epoch": 0.3377486176446315, + "grad_norm": 14.29380534788505, + "learning_rate": 2.3146986460552354e-06, + "loss": 0.5732, + "step": 8185 + }, + { + "epoch": 0.33778988198398946, + "grad_norm": 2.8861144574448634, + "learning_rate": 2.3145303096937193e-06, + "loss": 0.5061, + "step": 8186 + }, + { + "epoch": 0.3378311463233474, + "grad_norm": 9.23191829675983, + "learning_rate": 2.3143619587826535e-06, + "loss": 0.5221, + "step": 8187 + }, + { + "epoch": 0.3378724106627053, + "grad_norm": 2.4516921768279163, + "learning_rate": 2.314193593325046e-06, + "loss": 0.5034, + "step": 8188 + }, + { + "epoch": 0.3379136750020632, + "grad_norm": 2.714220945274878, + "learning_rate": 2.314025213323904e-06, + "loss": 0.5093, + "step": 8189 + }, + { + "epoch": 0.3379549393414211, + "grad_norm": 2.266481724327917, + "learning_rate": 2.3138568187822356e-06, + "loss": 0.5137, + "step": 8190 + }, + { + "epoch": 0.3379962036807791, + "grad_norm": 7.783096537297339, + "learning_rate": 2.313688409703048e-06, + "loss": 0.543, + "step": 8191 + }, + { + "epoch": 0.338037468020137, + "grad_norm": 3.0527946601302274, + "learning_rate": 2.3135199860893508e-06, + "loss": 0.606, + "step": 8192 + }, + { + "epoch": 0.3380787323594949, + "grad_norm": 2.9826666236135666, + "learning_rate": 2.313351547944151e-06, + "loss": 0.5502, + "step": 8193 + }, + { + "epoch": 0.33811999669885284, + "grad_norm": 3.2337391919907374, + "learning_rate": 2.3131830952704575e-06, + "loss": 0.5619, + "step": 8194 + }, + { + "epoch": 0.33816126103821076, + "grad_norm": 4.882554509424427, + "learning_rate": 2.31301462807128e-06, + "loss": 0.5363, + "step": 8195 + }, + { + "epoch": 0.33820252537756873, + "grad_norm": 2.1130601935642015, + "learning_rate": 2.3128461463496273e-06, + "loss": 0.5359, + "step": 8196 + }, + { + "epoch": 0.33824378971692665, + "grad_norm": 5.779095443514704, + "learning_rate": 2.312677650108509e-06, + "loss": 0.5216, + "step": 8197 + }, + { + "epoch": 0.33828505405628456, + "grad_norm": 3.2618446175587175, + "learning_rate": 2.312509139350935e-06, + "loss": 0.5205, + "step": 8198 + }, + { + "epoch": 0.3383263183956425, + "grad_norm": 29.232793227421823, + "learning_rate": 2.3123406140799155e-06, + "loss": 0.528, + "step": 8199 + }, + { + "epoch": 0.3383675827350004, + "grad_norm": 11.492419059240406, + "learning_rate": 2.31217207429846e-06, + "loss": 0.6234, + "step": 8200 + }, + { + "epoch": 0.33840884707435837, + "grad_norm": 3.2428567151871133, + "learning_rate": 2.3120035200095796e-06, + "loss": 0.534, + "step": 8201 + }, + { + "epoch": 0.3384501114137163, + "grad_norm": 5.5972123667292815, + "learning_rate": 2.311834951216285e-06, + "loss": 0.5635, + "step": 8202 + }, + { + "epoch": 0.3384913757530742, + "grad_norm": 4.480596988290722, + "learning_rate": 2.311666367921587e-06, + "loss": 0.5072, + "step": 8203 + }, + { + "epoch": 0.3385326400924321, + "grad_norm": 2.65821678548113, + "learning_rate": 2.3114977701284974e-06, + "loss": 0.5172, + "step": 8204 + }, + { + "epoch": 
0.33857390443179003, + "grad_norm": 4.080172811722075, + "learning_rate": 2.3113291578400276e-06, + "loss": 0.5671, + "step": 8205 + }, + { + "epoch": 0.338615168771148, + "grad_norm": 3.313122642146533, + "learning_rate": 2.3111605310591895e-06, + "loss": 0.5025, + "step": 8206 + }, + { + "epoch": 0.3386564331105059, + "grad_norm": 4.329920277677524, + "learning_rate": 2.310991889788995e-06, + "loss": 0.5783, + "step": 8207 + }, + { + "epoch": 0.33869769744986383, + "grad_norm": 2.8857264640887514, + "learning_rate": 2.3108232340324568e-06, + "loss": 0.5291, + "step": 8208 + }, + { + "epoch": 0.33873896178922175, + "grad_norm": 8.275212723406318, + "learning_rate": 2.3106545637925867e-06, + "loss": 0.5307, + "step": 8209 + }, + { + "epoch": 0.33878022612857966, + "grad_norm": 6.656910618206778, + "learning_rate": 2.3104858790723987e-06, + "loss": 0.5743, + "step": 8210 + }, + { + "epoch": 0.33882149046793764, + "grad_norm": 2.4053455517725197, + "learning_rate": 2.3103171798749053e-06, + "loss": 0.5267, + "step": 8211 + }, + { + "epoch": 0.33886275480729555, + "grad_norm": 4.097582733084946, + "learning_rate": 2.3101484662031197e-06, + "loss": 0.544, + "step": 8212 + }, + { + "epoch": 0.33890401914665347, + "grad_norm": 6.435646731781296, + "learning_rate": 2.3099797380600562e-06, + "loss": 0.5378, + "step": 8213 + }, + { + "epoch": 0.3389452834860114, + "grad_norm": 2.7770801244077212, + "learning_rate": 2.309810995448728e-06, + "loss": 0.5409, + "step": 8214 + }, + { + "epoch": 0.3389865478253693, + "grad_norm": 11.098503220368553, + "learning_rate": 2.3096422383721494e-06, + "loss": 0.5661, + "step": 8215 + }, + { + "epoch": 0.3390278121647272, + "grad_norm": 2.192690290756656, + "learning_rate": 2.3094734668333353e-06, + "loss": 0.5359, + "step": 8216 + }, + { + "epoch": 0.3390690765040852, + "grad_norm": 3.832387391350676, + "learning_rate": 2.3093046808353004e-06, + "loss": 0.5466, + "step": 8217 + }, + { + "epoch": 0.3391103408434431, + "grad_norm": 2.3378419158028225, + "learning_rate": 2.3091358803810583e-06, + "loss": 0.5248, + "step": 8218 + }, + { + "epoch": 0.339151605182801, + "grad_norm": 3.15057870849358, + "learning_rate": 2.308967065473626e-06, + "loss": 0.5696, + "step": 8219 + }, + { + "epoch": 0.33919286952215894, + "grad_norm": 2.564343068128702, + "learning_rate": 2.3087982361160187e-06, + "loss": 0.5445, + "step": 8220 + }, + { + "epoch": 0.33923413386151685, + "grad_norm": 4.712441787543476, + "learning_rate": 2.3086293923112504e-06, + "loss": 0.5653, + "step": 8221 + }, + { + "epoch": 0.3392753982008748, + "grad_norm": 5.112014869986212, + "learning_rate": 2.308460534062339e-06, + "loss": 0.557, + "step": 8222 + }, + { + "epoch": 0.33931666254023274, + "grad_norm": 3.4711486389741477, + "learning_rate": 2.3082916613723e-06, + "loss": 0.559, + "step": 8223 + }, + { + "epoch": 0.33935792687959065, + "grad_norm": 3.6445761266579506, + "learning_rate": 2.30812277424415e-06, + "loss": 0.5062, + "step": 8224 + }, + { + "epoch": 0.33939919121894857, + "grad_norm": 7.526775686007153, + "learning_rate": 2.3079538726809054e-06, + "loss": 0.5916, + "step": 8225 + }, + { + "epoch": 0.3394404555583065, + "grad_norm": 4.074867155685022, + "learning_rate": 2.307784956685583e-06, + "loss": 0.5776, + "step": 8226 + }, + { + "epoch": 0.33948171989766446, + "grad_norm": 7.631928375525863, + "learning_rate": 2.3076160262612013e-06, + "loss": 0.5741, + "step": 8227 + }, + { + "epoch": 0.3395229842370224, + "grad_norm": 29.473434455929592, + "learning_rate": 2.307447081410777e-06, + 
"loss": 0.502, + "step": 8228 + }, + { + "epoch": 0.3395642485763803, + "grad_norm": 3.3137358008614037, + "learning_rate": 2.307278122137327e-06, + "loss": 0.5291, + "step": 8229 + }, + { + "epoch": 0.3396055129157382, + "grad_norm": 12.314629382280922, + "learning_rate": 2.3071091484438707e-06, + "loss": 0.6253, + "step": 8230 + }, + { + "epoch": 0.3396467772550961, + "grad_norm": 2.383910583091796, + "learning_rate": 2.3069401603334263e-06, + "loss": 0.5376, + "step": 8231 + }, + { + "epoch": 0.3396880415944541, + "grad_norm": 2.631339486070086, + "learning_rate": 2.3067711578090117e-06, + "loss": 0.5225, + "step": 8232 + }, + { + "epoch": 0.339729305933812, + "grad_norm": 3.034544465476516, + "learning_rate": 2.306602140873646e-06, + "loss": 0.5321, + "step": 8233 + }, + { + "epoch": 0.3397705702731699, + "grad_norm": 4.182693130448914, + "learning_rate": 2.3064331095303483e-06, + "loss": 0.5163, + "step": 8234 + }, + { + "epoch": 0.33981183461252784, + "grad_norm": 9.590875110327314, + "learning_rate": 2.3062640637821375e-06, + "loss": 0.534, + "step": 8235 + }, + { + "epoch": 0.33985309895188576, + "grad_norm": 21.553681430543673, + "learning_rate": 2.306095003632034e-06, + "loss": 0.561, + "step": 8236 + }, + { + "epoch": 0.33989436329124373, + "grad_norm": 4.03840766788469, + "learning_rate": 2.3059259290830573e-06, + "loss": 0.5766, + "step": 8237 + }, + { + "epoch": 0.33993562763060164, + "grad_norm": 6.5292249364498955, + "learning_rate": 2.3057568401382275e-06, + "loss": 0.514, + "step": 8238 + }, + { + "epoch": 0.33997689196995956, + "grad_norm": 4.889445202485623, + "learning_rate": 2.3055877368005643e-06, + "loss": 0.5452, + "step": 8239 + }, + { + "epoch": 0.3400181563093175, + "grad_norm": 13.553747568276572, + "learning_rate": 2.305418619073089e-06, + "loss": 0.5468, + "step": 8240 + }, + { + "epoch": 0.3400594206486754, + "grad_norm": 17.16947077020798, + "learning_rate": 2.3052494869588224e-06, + "loss": 0.4776, + "step": 8241 + }, + { + "epoch": 0.34010068498803336, + "grad_norm": 9.438295818463898, + "learning_rate": 2.3050803404607855e-06, + "loss": 0.5786, + "step": 8242 + }, + { + "epoch": 0.3401419493273913, + "grad_norm": 7.6296690052324365, + "learning_rate": 2.304911179582e-06, + "loss": 0.5787, + "step": 8243 + }, + { + "epoch": 0.3401832136667492, + "grad_norm": 2.366322355108208, + "learning_rate": 2.3047420043254864e-06, + "loss": 0.5649, + "step": 8244 + }, + { + "epoch": 0.3402244780061071, + "grad_norm": 3.9252424128086254, + "learning_rate": 2.3045728146942686e-06, + "loss": 0.5357, + "step": 8245 + }, + { + "epoch": 0.340265742345465, + "grad_norm": 2.8684821998621324, + "learning_rate": 2.304403610691367e-06, + "loss": 0.4846, + "step": 8246 + }, + { + "epoch": 0.340307006684823, + "grad_norm": 8.824684550417727, + "learning_rate": 2.3042343923198046e-06, + "loss": 0.5478, + "step": 8247 + }, + { + "epoch": 0.3403482710241809, + "grad_norm": 164.03487801396068, + "learning_rate": 2.304065159582604e-06, + "loss": 0.5117, + "step": 8248 + }, + { + "epoch": 0.34038953536353883, + "grad_norm": 3.413796080019445, + "learning_rate": 2.3038959124827885e-06, + "loss": 0.5732, + "step": 8249 + }, + { + "epoch": 0.34043079970289675, + "grad_norm": 3.1001155302112324, + "learning_rate": 2.3037266510233805e-06, + "loss": 0.5285, + "step": 8250 + }, + { + "epoch": 0.34047206404225466, + "grad_norm": 4.017625765248456, + "learning_rate": 2.3035573752074044e-06, + "loss": 0.5622, + "step": 8251 + }, + { + "epoch": 0.34051332838161263, + "grad_norm": 
2.7980610490518045, + "learning_rate": 2.3033880850378833e-06, + "loss": 0.5011, + "step": 8252 + }, + { + "epoch": 0.34055459272097055, + "grad_norm": 3.4350266666945766, + "learning_rate": 2.303218780517841e-06, + "loss": 0.4936, + "step": 8253 + }, + { + "epoch": 0.34059585706032847, + "grad_norm": 2.8466726334374135, + "learning_rate": 2.303049461650302e-06, + "loss": 0.5639, + "step": 8254 + }, + { + "epoch": 0.3406371213996864, + "grad_norm": 3.708025209517022, + "learning_rate": 2.3028801284382908e-06, + "loss": 0.5492, + "step": 8255 + }, + { + "epoch": 0.3406783857390443, + "grad_norm": 6.837099012345069, + "learning_rate": 2.302710780884832e-06, + "loss": 0.5372, + "step": 8256 + }, + { + "epoch": 0.34071965007840227, + "grad_norm": 3.1951478431830966, + "learning_rate": 2.3025414189929506e-06, + "loss": 0.5627, + "step": 8257 + }, + { + "epoch": 0.3407609144177602, + "grad_norm": 3.974870175878017, + "learning_rate": 2.3023720427656714e-06, + "loss": 0.5187, + "step": 8258 + }, + { + "epoch": 0.3408021787571181, + "grad_norm": 2.7155622505623183, + "learning_rate": 2.3022026522060205e-06, + "loss": 0.54, + "step": 8259 + }, + { + "epoch": 0.340843443096476, + "grad_norm": 5.6285723689683795, + "learning_rate": 2.3020332473170234e-06, + "loss": 0.5741, + "step": 8260 + }, + { + "epoch": 0.34088470743583393, + "grad_norm": 2.6392722112843847, + "learning_rate": 2.3018638281017062e-06, + "loss": 0.4963, + "step": 8261 + }, + { + "epoch": 0.3409259717751919, + "grad_norm": 2.7047603229516346, + "learning_rate": 2.301694394563095e-06, + "loss": 0.5344, + "step": 8262 + }, + { + "epoch": 0.3409672361145498, + "grad_norm": 3.4415383359771545, + "learning_rate": 2.301524946704217e-06, + "loss": 0.5486, + "step": 8263 + }, + { + "epoch": 0.34100850045390774, + "grad_norm": 5.22968142431888, + "learning_rate": 2.3013554845280975e-06, + "loss": 0.5261, + "step": 8264 + }, + { + "epoch": 0.34104976479326565, + "grad_norm": 5.904406203674684, + "learning_rate": 2.3011860080377647e-06, + "loss": 0.5143, + "step": 8265 + }, + { + "epoch": 0.34109102913262357, + "grad_norm": 2.431339774998388, + "learning_rate": 2.3010165172362457e-06, + "loss": 0.493, + "step": 8266 + }, + { + "epoch": 0.34113229347198154, + "grad_norm": 3.1616256119262953, + "learning_rate": 2.300847012126568e-06, + "loss": 0.512, + "step": 8267 + }, + { + "epoch": 0.34117355781133946, + "grad_norm": 2.4905788424258595, + "learning_rate": 2.3006774927117588e-06, + "loss": 0.5298, + "step": 8268 + }, + { + "epoch": 0.34121482215069737, + "grad_norm": 2.7908379538166845, + "learning_rate": 2.3005079589948462e-06, + "loss": 0.5541, + "step": 8269 + }, + { + "epoch": 0.3412560864900553, + "grad_norm": 13.723321087192295, + "learning_rate": 2.3003384109788592e-06, + "loss": 0.5819, + "step": 8270 + }, + { + "epoch": 0.3412973508294132, + "grad_norm": 3.219699660005938, + "learning_rate": 2.3001688486668268e-06, + "loss": 0.5705, + "step": 8271 + }, + { + "epoch": 0.3413386151687712, + "grad_norm": 3.346881257151934, + "learning_rate": 2.2999992720617766e-06, + "loss": 0.5474, + "step": 8272 + }, + { + "epoch": 0.3413798795081291, + "grad_norm": 6.855033458836823, + "learning_rate": 2.2998296811667376e-06, + "loss": 0.5419, + "step": 8273 + }, + { + "epoch": 0.341421143847487, + "grad_norm": 2.2196362233075373, + "learning_rate": 2.29966007598474e-06, + "loss": 0.5529, + "step": 8274 + }, + { + "epoch": 0.3414624081868449, + "grad_norm": 3.5929946935091372, + "learning_rate": 2.2994904565188135e-06, + "loss": 0.5223, + "step": 8275 
+ }, + { + "epoch": 0.34150367252620284, + "grad_norm": 2.4473447500905343, + "learning_rate": 2.2993208227719868e-06, + "loss": 0.5344, + "step": 8276 + }, + { + "epoch": 0.34154493686556076, + "grad_norm": 5.685622204989258, + "learning_rate": 2.2991511747472902e-06, + "loss": 0.4964, + "step": 8277 + }, + { + "epoch": 0.3415862012049187, + "grad_norm": 19.23857038792225, + "learning_rate": 2.298981512447756e-06, + "loss": 0.5448, + "step": 8278 + }, + { + "epoch": 0.34162746554427664, + "grad_norm": 5.712662372918003, + "learning_rate": 2.298811835876412e-06, + "loss": 0.5324, + "step": 8279 + }, + { + "epoch": 0.34166872988363456, + "grad_norm": 6.9578632467503665, + "learning_rate": 2.2986421450362905e-06, + "loss": 0.5739, + "step": 8280 + }, + { + "epoch": 0.3417099942229925, + "grad_norm": 2.8560451774490407, + "learning_rate": 2.2984724399304222e-06, + "loss": 0.4786, + "step": 8281 + }, + { + "epoch": 0.3417512585623504, + "grad_norm": 3.273833209034194, + "learning_rate": 2.2983027205618393e-06, + "loss": 0.5343, + "step": 8282 + }, + { + "epoch": 0.34179252290170836, + "grad_norm": 7.844265669442772, + "learning_rate": 2.2981329869335723e-06, + "loss": 0.4799, + "step": 8283 + }, + { + "epoch": 0.3418337872410663, + "grad_norm": 2.632683248475478, + "learning_rate": 2.2979632390486536e-06, + "loss": 0.5241, + "step": 8284 + }, + { + "epoch": 0.3418750515804242, + "grad_norm": 5.324720315563383, + "learning_rate": 2.297793476910115e-06, + "loss": 0.5288, + "step": 8285 + }, + { + "epoch": 0.3419163159197821, + "grad_norm": 4.265622904682844, + "learning_rate": 2.29762370052099e-06, + "loss": 0.6245, + "step": 8286 + }, + { + "epoch": 0.34195758025914, + "grad_norm": 3.4523331837861515, + "learning_rate": 2.2974539098843096e-06, + "loss": 0.5944, + "step": 8287 + }, + { + "epoch": 0.341998844598498, + "grad_norm": 2.8946473524421963, + "learning_rate": 2.2972841050031076e-06, + "loss": 0.5281, + "step": 8288 + }, + { + "epoch": 0.3420401089378559, + "grad_norm": 5.044378203801935, + "learning_rate": 2.2971142858804167e-06, + "loss": 0.523, + "step": 8289 + }, + { + "epoch": 0.34208137327721383, + "grad_norm": 3.4989005506241555, + "learning_rate": 2.2969444525192713e-06, + "loss": 0.4701, + "step": 8290 + }, + { + "epoch": 0.34212263761657175, + "grad_norm": 2.5753324869151775, + "learning_rate": 2.296774604922704e-06, + "loss": 0.5898, + "step": 8291 + }, + { + "epoch": 0.34216390195592966, + "grad_norm": 3.1370379928634122, + "learning_rate": 2.296604743093749e-06, + "loss": 0.5402, + "step": 8292 + }, + { + "epoch": 0.34220516629528763, + "grad_norm": 4.901881463079657, + "learning_rate": 2.29643486703544e-06, + "loss": 0.5523, + "step": 8293 + }, + { + "epoch": 0.34224643063464555, + "grad_norm": 6.550459633656296, + "learning_rate": 2.296264976750812e-06, + "loss": 0.5138, + "step": 8294 + }, + { + "epoch": 0.34228769497400346, + "grad_norm": 2.6706419324458746, + "learning_rate": 2.2960950722429e-06, + "loss": 0.5529, + "step": 8295 + }, + { + "epoch": 0.3423289593133614, + "grad_norm": 3.9045425570415535, + "learning_rate": 2.2959251535147385e-06, + "loss": 0.536, + "step": 8296 + }, + { + "epoch": 0.3423702236527193, + "grad_norm": 2.5122345191005224, + "learning_rate": 2.2957552205693623e-06, + "loss": 0.516, + "step": 8297 + }, + { + "epoch": 0.34241148799207727, + "grad_norm": 1.9370289061301282, + "learning_rate": 2.2955852734098076e-06, + "loss": 0.5527, + "step": 8298 + }, + { + "epoch": 0.3424527523314352, + "grad_norm": 4.5712477477381555, + "learning_rate": 
2.2954153120391098e-06, + "loss": 0.6175, + "step": 8299 + }, + { + "epoch": 0.3424940166707931, + "grad_norm": 3.39529209445191, + "learning_rate": 2.295245336460304e-06, + "loss": 0.5699, + "step": 8300 + }, + { + "epoch": 0.342535281010151, + "grad_norm": 3.1839876295587346, + "learning_rate": 2.2950753466764274e-06, + "loss": 0.576, + "step": 8301 + }, + { + "epoch": 0.34257654534950893, + "grad_norm": 2.796556072185351, + "learning_rate": 2.2949053426905164e-06, + "loss": 0.6162, + "step": 8302 + }, + { + "epoch": 0.3426178096888669, + "grad_norm": 3.8831141352228338, + "learning_rate": 2.294735324505607e-06, + "loss": 0.5536, + "step": 8303 + }, + { + "epoch": 0.3426590740282248, + "grad_norm": 4.211196567912478, + "learning_rate": 2.294565292124737e-06, + "loss": 0.5485, + "step": 8304 + }, + { + "epoch": 0.34270033836758274, + "grad_norm": 3.8093738014569825, + "learning_rate": 2.294395245550943e-06, + "loss": 0.5612, + "step": 8305 + }, + { + "epoch": 0.34274160270694065, + "grad_norm": 2.8237948249120626, + "learning_rate": 2.2942251847872628e-06, + "loss": 0.5432, + "step": 8306 + }, + { + "epoch": 0.34278286704629857, + "grad_norm": 4.933138972555669, + "learning_rate": 2.2940551098367338e-06, + "loss": 0.5048, + "step": 8307 + }, + { + "epoch": 0.34282413138565654, + "grad_norm": 3.109988985544104, + "learning_rate": 2.293885020702394e-06, + "loss": 0.5292, + "step": 8308 + }, + { + "epoch": 0.34286539572501445, + "grad_norm": 2.3619330862315677, + "learning_rate": 2.2937149173872816e-06, + "loss": 0.5337, + "step": 8309 + }, + { + "epoch": 0.34290666006437237, + "grad_norm": 3.5013702634916606, + "learning_rate": 2.2935447998944356e-06, + "loss": 0.5748, + "step": 8310 + }, + { + "epoch": 0.3429479244037303, + "grad_norm": 3.4619305463371632, + "learning_rate": 2.2933746682268942e-06, + "loss": 0.5185, + "step": 8311 + }, + { + "epoch": 0.3429891887430882, + "grad_norm": 6.904331289773183, + "learning_rate": 2.2932045223876963e-06, + "loss": 0.6034, + "step": 8312 + }, + { + "epoch": 0.3430304530824462, + "grad_norm": 2.1654847121296337, + "learning_rate": 2.2930343623798814e-06, + "loss": 0.5055, + "step": 8313 + }, + { + "epoch": 0.3430717174218041, + "grad_norm": 3.197552530239863, + "learning_rate": 2.2928641882064895e-06, + "loss": 0.5943, + "step": 8314 + }, + { + "epoch": 0.343112981761162, + "grad_norm": 2.5978922773823228, + "learning_rate": 2.292693999870559e-06, + "loss": 0.5474, + "step": 8315 + }, + { + "epoch": 0.3431542461005199, + "grad_norm": 4.23278281660668, + "learning_rate": 2.292523797375131e-06, + "loss": 0.5588, + "step": 8316 + }, + { + "epoch": 0.34319551043987784, + "grad_norm": 2.2953856185199992, + "learning_rate": 2.2923535807232455e-06, + "loss": 0.5542, + "step": 8317 + }, + { + "epoch": 0.3432367747792358, + "grad_norm": 5.1827408672239015, + "learning_rate": 2.292183349917942e-06, + "loss": 0.5025, + "step": 8318 + }, + { + "epoch": 0.3432780391185937, + "grad_norm": 2.5898347883377153, + "learning_rate": 2.2920131049622637e-06, + "loss": 0.5461, + "step": 8319 + }, + { + "epoch": 0.34331930345795164, + "grad_norm": 5.040004742830388, + "learning_rate": 2.2918428458592485e-06, + "loss": 0.5686, + "step": 8320 + }, + { + "epoch": 0.34336056779730956, + "grad_norm": 3.611457477649882, + "learning_rate": 2.2916725726119403e-06, + "loss": 0.5493, + "step": 8321 + }, + { + "epoch": 0.3434018321366675, + "grad_norm": 7.634700314638133, + "learning_rate": 2.291502285223379e-06, + "loss": 0.5695, + "step": 8322 + }, + { + "epoch": 
0.34344309647602544, + "grad_norm": 4.709872641354612, + "learning_rate": 2.2913319836966067e-06, + "loss": 0.5244, + "step": 8323 + }, + { + "epoch": 0.34348436081538336, + "grad_norm": 3.194534722583127, + "learning_rate": 2.291161668034666e-06, + "loss": 0.6146, + "step": 8324 + }, + { + "epoch": 0.3435256251547413, + "grad_norm": 4.648892857314871, + "learning_rate": 2.2909913382405983e-06, + "loss": 0.5094, + "step": 8325 + }, + { + "epoch": 0.3435668894940992, + "grad_norm": 4.139990986320064, + "learning_rate": 2.2908209943174466e-06, + "loss": 0.5491, + "step": 8326 + }, + { + "epoch": 0.3436081538334571, + "grad_norm": 3.9031780091798995, + "learning_rate": 2.290650636268254e-06, + "loss": 0.5127, + "step": 8327 + }, + { + "epoch": 0.3436494181728151, + "grad_norm": 4.972914968645211, + "learning_rate": 2.2904802640960628e-06, + "loss": 0.5338, + "step": 8328 + }, + { + "epoch": 0.343690682512173, + "grad_norm": 7.263012782436856, + "learning_rate": 2.290309877803917e-06, + "loss": 0.5645, + "step": 8329 + }, + { + "epoch": 0.3437319468515309, + "grad_norm": 3.172677908674842, + "learning_rate": 2.2901394773948595e-06, + "loss": 0.5255, + "step": 8330 + }, + { + "epoch": 0.3437732111908888, + "grad_norm": 5.13847459092614, + "learning_rate": 2.2899690628719343e-06, + "loss": 0.513, + "step": 8331 + }, + { + "epoch": 0.34381447553024674, + "grad_norm": 3.2023213510397697, + "learning_rate": 2.2897986342381852e-06, + "loss": 0.5701, + "step": 8332 + }, + { + "epoch": 0.3438557398696047, + "grad_norm": 5.653235462745036, + "learning_rate": 2.289628191496657e-06, + "loss": 0.5436, + "step": 8333 + }, + { + "epoch": 0.34389700420896263, + "grad_norm": 2.839868940626653, + "learning_rate": 2.2894577346503935e-06, + "loss": 0.548, + "step": 8334 + }, + { + "epoch": 0.34393826854832055, + "grad_norm": 3.15486703390949, + "learning_rate": 2.2892872637024405e-06, + "loss": 0.5161, + "step": 8335 + }, + { + "epoch": 0.34397953288767846, + "grad_norm": 3.1017087154249197, + "learning_rate": 2.289116778655842e-06, + "loss": 0.5089, + "step": 8336 + }, + { + "epoch": 0.3440207972270364, + "grad_norm": 4.155953200527245, + "learning_rate": 2.2889462795136442e-06, + "loss": 0.5163, + "step": 8337 + }, + { + "epoch": 0.3440620615663943, + "grad_norm": 4.543302487191708, + "learning_rate": 2.288775766278892e-06, + "loss": 0.6103, + "step": 8338 + }, + { + "epoch": 0.34410332590575227, + "grad_norm": 2.9318445133972344, + "learning_rate": 2.2886052389546313e-06, + "loss": 0.5824, + "step": 8339 + }, + { + "epoch": 0.3441445902451102, + "grad_norm": 2.8407629337663245, + "learning_rate": 2.288434697543908e-06, + "loss": 0.5856, + "step": 8340 + }, + { + "epoch": 0.3441858545844681, + "grad_norm": 3.5819042132483805, + "learning_rate": 2.2882641420497695e-06, + "loss": 0.4997, + "step": 8341 + }, + { + "epoch": 0.344227118923826, + "grad_norm": 2.2252651971062325, + "learning_rate": 2.2880935724752606e-06, + "loss": 0.5283, + "step": 8342 + }, + { + "epoch": 0.34426838326318393, + "grad_norm": 2.476195243048569, + "learning_rate": 2.2879229888234296e-06, + "loss": 0.5178, + "step": 8343 + }, + { + "epoch": 0.3443096476025419, + "grad_norm": 3.3784782067964727, + "learning_rate": 2.2877523910973225e-06, + "loss": 0.5755, + "step": 8344 + }, + { + "epoch": 0.3443509119418998, + "grad_norm": 4.217207612899498, + "learning_rate": 2.2875817792999877e-06, + "loss": 0.5369, + "step": 8345 + }, + { + "epoch": 0.34439217628125773, + "grad_norm": 18.577216941888018, + "learning_rate": 2.287411153434471e-06, + 
"loss": 0.5389, + "step": 8346 + }, + { + "epoch": 0.34443344062061565, + "grad_norm": 2.5080312222548646, + "learning_rate": 2.287240513503822e-06, + "loss": 0.5195, + "step": 8347 + }, + { + "epoch": 0.34447470495997357, + "grad_norm": 2.414705640528768, + "learning_rate": 2.287069859511088e-06, + "loss": 0.5226, + "step": 8348 + }, + { + "epoch": 0.34451596929933154, + "grad_norm": 6.972353995803145, + "learning_rate": 2.2868991914593174e-06, + "loss": 0.5001, + "step": 8349 + }, + { + "epoch": 0.34455723363868945, + "grad_norm": 9.426394447156799, + "learning_rate": 2.286728509351559e-06, + "loss": 0.5383, + "step": 8350 + }, + { + "epoch": 0.34459849797804737, + "grad_norm": 3.076526883813889, + "learning_rate": 2.2865578131908607e-06, + "loss": 0.5859, + "step": 8351 + }, + { + "epoch": 0.3446397623174053, + "grad_norm": 2.5473140320667578, + "learning_rate": 2.2863871029802723e-06, + "loss": 0.574, + "step": 8352 + }, + { + "epoch": 0.3446810266567632, + "grad_norm": 2.629763668099028, + "learning_rate": 2.2862163787228432e-06, + "loss": 0.5375, + "step": 8353 + }, + { + "epoch": 0.3447222909961212, + "grad_norm": 6.165018705331671, + "learning_rate": 2.286045640421623e-06, + "loss": 0.5819, + "step": 8354 + }, + { + "epoch": 0.3447635553354791, + "grad_norm": 7.049282803240227, + "learning_rate": 2.285874888079661e-06, + "loss": 0.5471, + "step": 8355 + }, + { + "epoch": 0.344804819674837, + "grad_norm": 2.5585060837744398, + "learning_rate": 2.285704121700008e-06, + "loss": 0.5372, + "step": 8356 + }, + { + "epoch": 0.3448460840141949, + "grad_norm": 8.096549332953316, + "learning_rate": 2.2855333412857132e-06, + "loss": 0.5438, + "step": 8357 + }, + { + "epoch": 0.34488734835355284, + "grad_norm": 3.944961874649011, + "learning_rate": 2.285362546839828e-06, + "loss": 0.5128, + "step": 8358 + }, + { + "epoch": 0.3449286126929108, + "grad_norm": 3.153155414153282, + "learning_rate": 2.285191738365403e-06, + "loss": 0.4613, + "step": 8359 + }, + { + "epoch": 0.3449698770322687, + "grad_norm": 4.925059934633646, + "learning_rate": 2.2850209158654897e-06, + "loss": 0.5591, + "step": 8360 + }, + { + "epoch": 0.34501114137162664, + "grad_norm": 2.35480524711741, + "learning_rate": 2.2848500793431388e-06, + "loss": 0.5809, + "step": 8361 + }, + { + "epoch": 0.34505240571098456, + "grad_norm": 7.678918738487674, + "learning_rate": 2.284679228801402e-06, + "loss": 0.5265, + "step": 8362 + }, + { + "epoch": 0.34509367005034247, + "grad_norm": 16.77910764254486, + "learning_rate": 2.2845083642433307e-06, + "loss": 0.5499, + "step": 8363 + }, + { + "epoch": 0.34513493438970044, + "grad_norm": 3.7607765984669874, + "learning_rate": 2.2843374856719786e-06, + "loss": 0.5648, + "step": 8364 + }, + { + "epoch": 0.34517619872905836, + "grad_norm": 3.162570452131431, + "learning_rate": 2.284166593090396e-06, + "loss": 0.5058, + "step": 8365 + }, + { + "epoch": 0.3452174630684163, + "grad_norm": 4.126872289042607, + "learning_rate": 2.2839956865016365e-06, + "loss": 0.5141, + "step": 8366 + }, + { + "epoch": 0.3452587274077742, + "grad_norm": 4.343220752606336, + "learning_rate": 2.283824765908753e-06, + "loss": 0.5275, + "step": 8367 + }, + { + "epoch": 0.3452999917471321, + "grad_norm": 3.123273673266942, + "learning_rate": 2.2836538313147983e-06, + "loss": 0.5626, + "step": 8368 + }, + { + "epoch": 0.3453412560864901, + "grad_norm": 2.89517031491337, + "learning_rate": 2.2834828827228255e-06, + "loss": 0.5682, + "step": 8369 + }, + { + "epoch": 0.345382520425848, + "grad_norm": 3.2559232108108658, 
+ "learning_rate": 2.2833119201358893e-06, + "loss": 0.499, + "step": 8370 + }, + { + "epoch": 0.3454237847652059, + "grad_norm": 12.5669065472344, + "learning_rate": 2.2831409435570417e-06, + "loss": 0.5204, + "step": 8371 + }, + { + "epoch": 0.3454650491045638, + "grad_norm": 3.304344650913358, + "learning_rate": 2.282969952989338e-06, + "loss": 0.5904, + "step": 8372 + }, + { + "epoch": 0.34550631344392174, + "grad_norm": 2.5739645863336196, + "learning_rate": 2.2827989484358323e-06, + "loss": 0.5496, + "step": 8373 + }, + { + "epoch": 0.3455475777832797, + "grad_norm": 5.05253328440061, + "learning_rate": 2.2826279298995794e-06, + "loss": 0.5811, + "step": 8374 + }, + { + "epoch": 0.34558884212263763, + "grad_norm": 2.8287648946148023, + "learning_rate": 2.2824568973836335e-06, + "loss": 0.5181, + "step": 8375 + }, + { + "epoch": 0.34563010646199555, + "grad_norm": 7.994483198158929, + "learning_rate": 2.28228585089105e-06, + "loss": 0.5507, + "step": 8376 + }, + { + "epoch": 0.34567137080135346, + "grad_norm": 2.0749059785354316, + "learning_rate": 2.2821147904248846e-06, + "loss": 0.5074, + "step": 8377 + }, + { + "epoch": 0.3457126351407114, + "grad_norm": 2.6844436253768866, + "learning_rate": 2.2819437159881922e-06, + "loss": 0.5513, + "step": 8378 + }, + { + "epoch": 0.34575389948006935, + "grad_norm": 3.1730192698754176, + "learning_rate": 2.2817726275840286e-06, + "loss": 0.5988, + "step": 8379 + }, + { + "epoch": 0.34579516381942726, + "grad_norm": 2.761145516993604, + "learning_rate": 2.2816015252154507e-06, + "loss": 0.4729, + "step": 8380 + }, + { + "epoch": 0.3458364281587852, + "grad_norm": 3.518699620970054, + "learning_rate": 2.2814304088855137e-06, + "loss": 0.532, + "step": 8381 + }, + { + "epoch": 0.3458776924981431, + "grad_norm": 6.622614626241621, + "learning_rate": 2.2812592785972755e-06, + "loss": 0.5365, + "step": 8382 + }, + { + "epoch": 0.345918956837501, + "grad_norm": 3.1077748072127314, + "learning_rate": 2.2810881343537917e-06, + "loss": 0.5987, + "step": 8383 + }, + { + "epoch": 0.345960221176859, + "grad_norm": 4.376795010186707, + "learning_rate": 2.28091697615812e-06, + "loss": 0.5761, + "step": 8384 + }, + { + "epoch": 0.3460014855162169, + "grad_norm": 2.4383987095139372, + "learning_rate": 2.2807458040133176e-06, + "loss": 0.5315, + "step": 8385 + }, + { + "epoch": 0.3460427498555748, + "grad_norm": 2.3361119796484515, + "learning_rate": 2.280574617922442e-06, + "loss": 0.5578, + "step": 8386 + }, + { + "epoch": 0.34608401419493273, + "grad_norm": 2.997129087371944, + "learning_rate": 2.280403417888551e-06, + "loss": 0.5436, + "step": 8387 + }, + { + "epoch": 0.34612527853429065, + "grad_norm": 2.843230110145226, + "learning_rate": 2.2802322039147022e-06, + "loss": 0.5483, + "step": 8388 + }, + { + "epoch": 0.3461665428736486, + "grad_norm": 2.404788983958284, + "learning_rate": 2.2800609760039547e-06, + "loss": 0.5889, + "step": 8389 + }, + { + "epoch": 0.34620780721300654, + "grad_norm": 2.3445757147218096, + "learning_rate": 2.2798897341593673e-06, + "loss": 0.5479, + "step": 8390 + }, + { + "epoch": 0.34624907155236445, + "grad_norm": 9.250106462450017, + "learning_rate": 2.2797184783839976e-06, + "loss": 0.5169, + "step": 8391 + }, + { + "epoch": 0.34629033589172237, + "grad_norm": 2.9497082948252342, + "learning_rate": 2.2795472086809057e-06, + "loss": 0.5119, + "step": 8392 + }, + { + "epoch": 0.3463316002310803, + "grad_norm": 10.07768704844769, + "learning_rate": 2.2793759250531504e-06, + "loss": 0.5573, + "step": 8393 + }, + { + "epoch": 
0.34637286457043825, + "grad_norm": 17.54341029374448, + "learning_rate": 2.279204627503791e-06, + "loss": 0.5107, + "step": 8394 + }, + { + "epoch": 0.34641412890979617, + "grad_norm": 3.101427765075062, + "learning_rate": 2.279033316035888e-06, + "loss": 0.5067, + "step": 8395 + }, + { + "epoch": 0.3464553932491541, + "grad_norm": 8.273434945710175, + "learning_rate": 2.2788619906525015e-06, + "loss": 0.5685, + "step": 8396 + }, + { + "epoch": 0.346496657588512, + "grad_norm": 3.1149756348048463, + "learning_rate": 2.2786906513566915e-06, + "loss": 0.564, + "step": 8397 + }, + { + "epoch": 0.3465379219278699, + "grad_norm": 2.9572782804748945, + "learning_rate": 2.278519298151518e-06, + "loss": 0.5734, + "step": 8398 + }, + { + "epoch": 0.34657918626722783, + "grad_norm": 3.024365367996845, + "learning_rate": 2.2783479310400424e-06, + "loss": 0.5592, + "step": 8399 + }, + { + "epoch": 0.3466204506065858, + "grad_norm": 2.3875686038379405, + "learning_rate": 2.2781765500253253e-06, + "loss": 0.5227, + "step": 8400 + }, + { + "epoch": 0.3466617149459437, + "grad_norm": 4.6396226412521475, + "learning_rate": 2.2780051551104287e-06, + "loss": 0.5724, + "step": 8401 + }, + { + "epoch": 0.34670297928530164, + "grad_norm": 3.0734732245551952, + "learning_rate": 2.2778337462984134e-06, + "loss": 0.5896, + "step": 8402 + }, + { + "epoch": 0.34674424362465955, + "grad_norm": 5.016886615388698, + "learning_rate": 2.2776623235923425e-06, + "loss": 0.5502, + "step": 8403 + }, + { + "epoch": 0.34678550796401747, + "grad_norm": 8.76087664240176, + "learning_rate": 2.2774908869952764e-06, + "loss": 0.4882, + "step": 8404 + }, + { + "epoch": 0.34682677230337544, + "grad_norm": 2.671187725959721, + "learning_rate": 2.2773194365102776e-06, + "loss": 0.49, + "step": 8405 + }, + { + "epoch": 0.34686803664273336, + "grad_norm": 15.760380094021176, + "learning_rate": 2.27714797214041e-06, + "loss": 0.5483, + "step": 8406 + }, + { + "epoch": 0.3469093009820913, + "grad_norm": 3.89938410775757, + "learning_rate": 2.276976493888735e-06, + "loss": 0.5329, + "step": 8407 + }, + { + "epoch": 0.3469505653214492, + "grad_norm": 3.089753431510194, + "learning_rate": 2.2768050017583158e-06, + "loss": 0.5108, + "step": 8408 + }, + { + "epoch": 0.3469918296608071, + "grad_norm": 2.1014147480720045, + "learning_rate": 2.276633495752217e-06, + "loss": 0.4994, + "step": 8409 + }, + { + "epoch": 0.3470330940001651, + "grad_norm": 2.2647952876341892, + "learning_rate": 2.2764619758735006e-06, + "loss": 0.4673, + "step": 8410 + }, + { + "epoch": 0.347074358339523, + "grad_norm": 4.848714425733035, + "learning_rate": 2.2762904421252304e-06, + "loss": 0.5317, + "step": 8411 + }, + { + "epoch": 0.3471156226788809, + "grad_norm": 2.9131844319702482, + "learning_rate": 2.2761188945104714e-06, + "loss": 0.5638, + "step": 8412 + }, + { + "epoch": 0.3471568870182388, + "grad_norm": 3.127109753320622, + "learning_rate": 2.2759473330322874e-06, + "loss": 0.5129, + "step": 8413 + }, + { + "epoch": 0.34719815135759674, + "grad_norm": 3.0274457606657164, + "learning_rate": 2.2757757576937428e-06, + "loss": 0.5511, + "step": 8414 + }, + { + "epoch": 0.3472394156969547, + "grad_norm": 2.627276960201659, + "learning_rate": 2.2756041684979022e-06, + "loss": 0.5112, + "step": 8415 + }, + { + "epoch": 0.34728068003631263, + "grad_norm": 3.2947040116305524, + "learning_rate": 2.2754325654478315e-06, + "loss": 0.588, + "step": 8416 + }, + { + "epoch": 0.34732194437567054, + "grad_norm": 5.8308940086252585, + "learning_rate": 2.2752609485465953e-06, 
+ "loss": 0.591, + "step": 8417 + }, + { + "epoch": 0.34736320871502846, + "grad_norm": 2.7515334589398432, + "learning_rate": 2.2750893177972585e-06, + "loss": 0.5448, + "step": 8418 + }, + { + "epoch": 0.3474044730543864, + "grad_norm": 3.475035572916431, + "learning_rate": 2.274917673202888e-06, + "loss": 0.5641, + "step": 8419 + }, + { + "epoch": 0.34744573739374435, + "grad_norm": 3.150183343270019, + "learning_rate": 2.274746014766549e-06, + "loss": 0.489, + "step": 8420 + }, + { + "epoch": 0.34748700173310226, + "grad_norm": 3.9420908422386995, + "learning_rate": 2.2745743424913084e-06, + "loss": 0.5049, + "step": 8421 + }, + { + "epoch": 0.3475282660724602, + "grad_norm": 5.189159772439471, + "learning_rate": 2.2744026563802323e-06, + "loss": 0.5666, + "step": 8422 + }, + { + "epoch": 0.3475695304118181, + "grad_norm": 3.968921078797341, + "learning_rate": 2.274230956436388e-06, + "loss": 0.5701, + "step": 8423 + }, + { + "epoch": 0.347610794751176, + "grad_norm": 2.1676147109110357, + "learning_rate": 2.2740592426628407e-06, + "loss": 0.5567, + "step": 8424 + }, + { + "epoch": 0.347652059090534, + "grad_norm": 2.978352009188713, + "learning_rate": 2.27388751506266e-06, + "loss": 0.5241, + "step": 8425 + }, + { + "epoch": 0.3476933234298919, + "grad_norm": 3.780725817548009, + "learning_rate": 2.2737157736389117e-06, + "loss": 0.5726, + "step": 8426 + }, + { + "epoch": 0.3477345877692498, + "grad_norm": 6.169581217220398, + "learning_rate": 2.2735440183946646e-06, + "loss": 0.5368, + "step": 8427 + }, + { + "epoch": 0.34777585210860773, + "grad_norm": 4.1020342774468945, + "learning_rate": 2.2733722493329857e-06, + "loss": 0.5304, + "step": 8428 + }, + { + "epoch": 0.34781711644796565, + "grad_norm": 2.6732132699594775, + "learning_rate": 2.2732004664569445e-06, + "loss": 0.5052, + "step": 8429 + }, + { + "epoch": 0.3478583807873236, + "grad_norm": 2.684321373434061, + "learning_rate": 2.2730286697696084e-06, + "loss": 0.5191, + "step": 8430 + }, + { + "epoch": 0.34789964512668153, + "grad_norm": 2.923153271119574, + "learning_rate": 2.272856859274046e-06, + "loss": 0.4704, + "step": 8431 + }, + { + "epoch": 0.34794090946603945, + "grad_norm": 6.89727145276901, + "learning_rate": 2.272685034973327e-06, + "loss": 0.5743, + "step": 8432 + }, + { + "epoch": 0.34798217380539737, + "grad_norm": 4.002039856984148, + "learning_rate": 2.27251319687052e-06, + "loss": 0.5382, + "step": 8433 + }, + { + "epoch": 0.3480234381447553, + "grad_norm": 6.111096251777679, + "learning_rate": 2.2723413449686956e-06, + "loss": 0.5177, + "step": 8434 + }, + { + "epoch": 0.34806470248411325, + "grad_norm": 4.040574053370598, + "learning_rate": 2.272169479270922e-06, + "loss": 0.617, + "step": 8435 + }, + { + "epoch": 0.34810596682347117, + "grad_norm": 4.144668170176394, + "learning_rate": 2.2719975997802705e-06, + "loss": 0.6116, + "step": 8436 + }, + { + "epoch": 0.3481472311628291, + "grad_norm": 8.914859836079938, + "learning_rate": 2.27182570649981e-06, + "loss": 0.5493, + "step": 8437 + }, + { + "epoch": 0.348188495502187, + "grad_norm": 1.7051600093469603, + "learning_rate": 2.271653799432612e-06, + "loss": 0.5374, + "step": 8438 + }, + { + "epoch": 0.3482297598415449, + "grad_norm": 3.1956385762455275, + "learning_rate": 2.271481878581746e-06, + "loss": 0.5812, + "step": 8439 + }, + { + "epoch": 0.3482710241809029, + "grad_norm": 4.623515559318016, + "learning_rate": 2.271309943950284e-06, + "loss": 0.5635, + "step": 8440 + }, + { + "epoch": 0.3483122885202608, + "grad_norm": 3.878146814877936, + 
"learning_rate": 2.271137995541297e-06, + "loss": 0.514, + "step": 8441 + }, + { + "epoch": 0.3483535528596187, + "grad_norm": 2.8445663424101455, + "learning_rate": 2.270966033357857e-06, + "loss": 0.5394, + "step": 8442 + }, + { + "epoch": 0.34839481719897664, + "grad_norm": 2.5893026463081714, + "learning_rate": 2.270794057403034e-06, + "loss": 0.5181, + "step": 8443 + }, + { + "epoch": 0.34843608153833455, + "grad_norm": 2.726574034556617, + "learning_rate": 2.270622067679901e-06, + "loss": 0.6089, + "step": 8444 + }, + { + "epoch": 0.3484773458776925, + "grad_norm": 2.871195143399265, + "learning_rate": 2.2704500641915305e-06, + "loss": 0.5588, + "step": 8445 + }, + { + "epoch": 0.34851861021705044, + "grad_norm": 3.6967990421018957, + "learning_rate": 2.2702780469409946e-06, + "loss": 0.5526, + "step": 8446 + }, + { + "epoch": 0.34855987455640836, + "grad_norm": 11.035401040645603, + "learning_rate": 2.2701060159313652e-06, + "loss": 0.529, + "step": 8447 + }, + { + "epoch": 0.34860113889576627, + "grad_norm": 2.2109762631912195, + "learning_rate": 2.2699339711657162e-06, + "loss": 0.5059, + "step": 8448 + }, + { + "epoch": 0.3486424032351242, + "grad_norm": 2.0429440088764528, + "learning_rate": 2.2697619126471206e-06, + "loss": 0.5202, + "step": 8449 + }, + { + "epoch": 0.34868366757448216, + "grad_norm": 22.207803045137844, + "learning_rate": 2.2695898403786515e-06, + "loss": 0.5647, + "step": 8450 + }, + { + "epoch": 0.3487249319138401, + "grad_norm": 5.267425993543863, + "learning_rate": 2.269417754363382e-06, + "loss": 0.5909, + "step": 8451 + }, + { + "epoch": 0.348766196253198, + "grad_norm": 10.816691393469098, + "learning_rate": 2.2692456546043877e-06, + "loss": 0.5803, + "step": 8452 + }, + { + "epoch": 0.3488074605925559, + "grad_norm": 4.731498390933439, + "learning_rate": 2.2690735411047403e-06, + "loss": 0.4939, + "step": 8453 + }, + { + "epoch": 0.3488487249319138, + "grad_norm": 7.219978218903025, + "learning_rate": 2.268901413867517e-06, + "loss": 0.5461, + "step": 8454 + }, + { + "epoch": 0.3488899892712718, + "grad_norm": 4.441469936206217, + "learning_rate": 2.26872927289579e-06, + "loss": 0.52, + "step": 8455 + }, + { + "epoch": 0.3489312536106297, + "grad_norm": 6.389537724505761, + "learning_rate": 2.2685571181926355e-06, + "loss": 0.4837, + "step": 8456 + }, + { + "epoch": 0.3489725179499876, + "grad_norm": 4.024347383268637, + "learning_rate": 2.2683849497611282e-06, + "loss": 0.5106, + "step": 8457 + }, + { + "epoch": 0.34901378228934554, + "grad_norm": 3.9488170295042972, + "learning_rate": 2.268212767604343e-06, + "loss": 0.5431, + "step": 8458 + }, + { + "epoch": 0.34905504662870346, + "grad_norm": 2.79998541025537, + "learning_rate": 2.2680405717253567e-06, + "loss": 0.5571, + "step": 8459 + }, + { + "epoch": 0.3490963109680614, + "grad_norm": 3.095948409115447, + "learning_rate": 2.267868362127244e-06, + "loss": 0.5507, + "step": 8460 + }, + { + "epoch": 0.34913757530741935, + "grad_norm": 4.027824462763603, + "learning_rate": 2.2676961388130814e-06, + "loss": 0.5173, + "step": 8461 + }, + { + "epoch": 0.34917883964677726, + "grad_norm": 3.363227803300917, + "learning_rate": 2.267523901785946e-06, + "loss": 0.5774, + "step": 8462 + }, + { + "epoch": 0.3492201039861352, + "grad_norm": 4.506392605183313, + "learning_rate": 2.267351651048913e-06, + "loss": 0.5824, + "step": 8463 + }, + { + "epoch": 0.3492613683254931, + "grad_norm": 5.526289924851368, + "learning_rate": 2.26717938660506e-06, + "loss": 0.5023, + "step": 8464 + }, + { + "epoch": 
0.349302632664851, + "grad_norm": 3.4484804862989162, + "learning_rate": 2.267007108457464e-06, + "loss": 0.5152, + "step": 8465 + }, + { + "epoch": 0.349343897004209, + "grad_norm": 7.589212130888713, + "learning_rate": 2.2668348166092023e-06, + "loss": 0.5617, + "step": 8466 + }, + { + "epoch": 0.3493851613435669, + "grad_norm": 4.463696254161126, + "learning_rate": 2.266662511063352e-06, + "loss": 0.5785, + "step": 8467 + }, + { + "epoch": 0.3494264256829248, + "grad_norm": 3.498051706907644, + "learning_rate": 2.266490191822992e-06, + "loss": 0.5, + "step": 8468 + }, + { + "epoch": 0.34946769002228273, + "grad_norm": 4.05118842424505, + "learning_rate": 2.2663178588911994e-06, + "loss": 0.5389, + "step": 8469 + }, + { + "epoch": 0.34950895436164064, + "grad_norm": 2.5750820347172327, + "learning_rate": 2.266145512271053e-06, + "loss": 0.542, + "step": 8470 + }, + { + "epoch": 0.3495502187009986, + "grad_norm": 3.16905299208082, + "learning_rate": 2.2659731519656312e-06, + "loss": 0.5869, + "step": 8471 + }, + { + "epoch": 0.34959148304035653, + "grad_norm": 4.339565350634151, + "learning_rate": 2.2658007779780123e-06, + "loss": 0.5793, + "step": 8472 + }, + { + "epoch": 0.34963274737971445, + "grad_norm": 2.9631935018510047, + "learning_rate": 2.2656283903112764e-06, + "loss": 0.5327, + "step": 8473 + }, + { + "epoch": 0.34967401171907236, + "grad_norm": 3.826532748481982, + "learning_rate": 2.265455988968502e-06, + "loss": 0.5573, + "step": 8474 + }, + { + "epoch": 0.3497152760584303, + "grad_norm": 2.7282726819531984, + "learning_rate": 2.2652835739527682e-06, + "loss": 0.5394, + "step": 8475 + }, + { + "epoch": 0.34975654039778825, + "grad_norm": 5.68991065825189, + "learning_rate": 2.2651111452671557e-06, + "loss": 0.553, + "step": 8476 + }, + { + "epoch": 0.34979780473714617, + "grad_norm": 6.2998912285509245, + "learning_rate": 2.264938702914744e-06, + "loss": 0.5393, + "step": 8477 + }, + { + "epoch": 0.3498390690765041, + "grad_norm": 5.080033456791608, + "learning_rate": 2.264766246898613e-06, + "loss": 0.4787, + "step": 8478 + }, + { + "epoch": 0.349880333415862, + "grad_norm": 2.563653205205035, + "learning_rate": 2.2645937772218448e-06, + "loss": 0.522, + "step": 8479 + }, + { + "epoch": 0.3499215977552199, + "grad_norm": 3.6278384992980772, + "learning_rate": 2.2644212938875185e-06, + "loss": 0.4627, + "step": 8480 + }, + { + "epoch": 0.3499628620945779, + "grad_norm": 3.137834787374065, + "learning_rate": 2.2642487968987158e-06, + "loss": 0.5698, + "step": 8481 + }, + { + "epoch": 0.3500041264339358, + "grad_norm": 3.938315257704955, + "learning_rate": 2.2640762862585172e-06, + "loss": 0.532, + "step": 8482 + }, + { + "epoch": 0.3500453907732937, + "grad_norm": 4.244303298857892, + "learning_rate": 2.2639037619700052e-06, + "loss": 0.4839, + "step": 8483 + }, + { + "epoch": 0.35008665511265163, + "grad_norm": 4.777248018258055, + "learning_rate": 2.2637312240362604e-06, + "loss": 0.5431, + "step": 8484 + }, + { + "epoch": 0.35012791945200955, + "grad_norm": 3.218794200292287, + "learning_rate": 2.263558672460366e-06, + "loss": 0.5534, + "step": 8485 + }, + { + "epoch": 0.3501691837913675, + "grad_norm": 2.3254522655906147, + "learning_rate": 2.263386107245403e-06, + "loss": 0.4787, + "step": 8486 + }, + { + "epoch": 0.35021044813072544, + "grad_norm": 3.0811200201623703, + "learning_rate": 2.2632135283944553e-06, + "loss": 0.504, + "step": 8487 + }, + { + "epoch": 0.35025171247008335, + "grad_norm": 2.445708644276369, + "learning_rate": 2.263040935910604e-06, + "loss": 
0.5289, + "step": 8488 + }, + { + "epoch": 0.35029297680944127, + "grad_norm": 3.853280414547019, + "learning_rate": 2.2628683297969332e-06, + "loss": 0.5179, + "step": 8489 + }, + { + "epoch": 0.3503342411487992, + "grad_norm": 6.191401298417653, + "learning_rate": 2.2626957100565253e-06, + "loss": 0.5296, + "step": 8490 + }, + { + "epoch": 0.35037550548815716, + "grad_norm": 2.458657813005112, + "learning_rate": 2.2625230766924645e-06, + "loss": 0.5834, + "step": 8491 + }, + { + "epoch": 0.3504167698275151, + "grad_norm": 2.355101542275021, + "learning_rate": 2.262350429707833e-06, + "loss": 0.5289, + "step": 8492 + }, + { + "epoch": 0.350458034166873, + "grad_norm": 9.87601502302189, + "learning_rate": 2.2621777691057168e-06, + "loss": 0.5756, + "step": 8493 + }, + { + "epoch": 0.3504992985062309, + "grad_norm": 2.8794785698939984, + "learning_rate": 2.262005094889198e-06, + "loss": 0.5153, + "step": 8494 + }, + { + "epoch": 0.3505405628455888, + "grad_norm": 3.5154229882503354, + "learning_rate": 2.2618324070613633e-06, + "loss": 0.5305, + "step": 8495 + }, + { + "epoch": 0.3505818271849468, + "grad_norm": 2.58239704481238, + "learning_rate": 2.2616597056252947e-06, + "loss": 0.5506, + "step": 8496 + }, + { + "epoch": 0.3506230915243047, + "grad_norm": 2.233928055705421, + "learning_rate": 2.261486990584079e-06, + "loss": 0.507, + "step": 8497 + }, + { + "epoch": 0.3506643558636626, + "grad_norm": 2.7599716013135676, + "learning_rate": 2.2613142619408006e-06, + "loss": 0.5807, + "step": 8498 + }, + { + "epoch": 0.35070562020302054, + "grad_norm": 4.843191996635171, + "learning_rate": 2.261141519698545e-06, + "loss": 0.5391, + "step": 8499 + }, + { + "epoch": 0.35074688454237846, + "grad_norm": 4.59473659577467, + "learning_rate": 2.2609687638603973e-06, + "loss": 0.5904, + "step": 8500 + }, + { + "epoch": 0.35078814888173643, + "grad_norm": 3.325397950346938, + "learning_rate": 2.2607959944294445e-06, + "loss": 0.5051, + "step": 8501 + }, + { + "epoch": 0.35082941322109434, + "grad_norm": 6.048219974364839, + "learning_rate": 2.2606232114087718e-06, + "loss": 0.5459, + "step": 8502 + }, + { + "epoch": 0.35087067756045226, + "grad_norm": 2.7255702434231286, + "learning_rate": 2.2604504148014657e-06, + "loss": 0.5507, + "step": 8503 + }, + { + "epoch": 0.3509119418998102, + "grad_norm": 3.072577326009394, + "learning_rate": 2.2602776046106128e-06, + "loss": 0.5493, + "step": 8504 + }, + { + "epoch": 0.3509532062391681, + "grad_norm": 8.838642047940018, + "learning_rate": 2.2601047808393e-06, + "loss": 0.5705, + "step": 8505 + }, + { + "epoch": 0.35099447057852606, + "grad_norm": 3.026912252207343, + "learning_rate": 2.2599319434906143e-06, + "loss": 0.5178, + "step": 8506 + }, + { + "epoch": 0.351035734917884, + "grad_norm": 2.0982576838148472, + "learning_rate": 2.2597590925676427e-06, + "loss": 0.5628, + "step": 8507 + }, + { + "epoch": 0.3510769992572419, + "grad_norm": 7.481313281220447, + "learning_rate": 2.2595862280734737e-06, + "loss": 0.5037, + "step": 8508 + }, + { + "epoch": 0.3511182635965998, + "grad_norm": 13.608048798153387, + "learning_rate": 2.2594133500111942e-06, + "loss": 0.5651, + "step": 8509 + }, + { + "epoch": 0.3511595279359577, + "grad_norm": 3.650732209253396, + "learning_rate": 2.2592404583838926e-06, + "loss": 0.4934, + "step": 8510 + }, + { + "epoch": 0.3512007922753157, + "grad_norm": 2.49813508598344, + "learning_rate": 2.2590675531946567e-06, + "loss": 0.5137, + "step": 8511 + }, + { + "epoch": 0.3512420566146736, + "grad_norm": 2.9102971527390515, + 
"learning_rate": 2.258894634446576e-06, + "loss": 0.5585, + "step": 8512 + }, + { + "epoch": 0.35128332095403153, + "grad_norm": 3.3811521821165185, + "learning_rate": 2.2587217021427384e-06, + "loss": 0.5661, + "step": 8513 + }, + { + "epoch": 0.35132458529338945, + "grad_norm": 2.209943925574457, + "learning_rate": 2.2585487562862327e-06, + "loss": 0.5437, + "step": 8514 + }, + { + "epoch": 0.35136584963274736, + "grad_norm": 3.7788310055601104, + "learning_rate": 2.2583757968801495e-06, + "loss": 0.5168, + "step": 8515 + }, + { + "epoch": 0.35140711397210533, + "grad_norm": 3.261724450235079, + "learning_rate": 2.258202823927577e-06, + "loss": 0.5152, + "step": 8516 + }, + { + "epoch": 0.35144837831146325, + "grad_norm": 3.69338078378344, + "learning_rate": 2.258029837431605e-06, + "loss": 0.5502, + "step": 8517 + }, + { + "epoch": 0.35148964265082117, + "grad_norm": 16.47868989275908, + "learning_rate": 2.2578568373953243e-06, + "loss": 0.5051, + "step": 8518 + }, + { + "epoch": 0.3515309069901791, + "grad_norm": 4.047473056588818, + "learning_rate": 2.2576838238218243e-06, + "loss": 0.5329, + "step": 8519 + }, + { + "epoch": 0.351572171329537, + "grad_norm": 2.149671927602765, + "learning_rate": 2.257510796714196e-06, + "loss": 0.5054, + "step": 8520 + }, + { + "epoch": 0.3516134356688949, + "grad_norm": 3.243776409375125, + "learning_rate": 2.2573377560755296e-06, + "loss": 0.5071, + "step": 8521 + }, + { + "epoch": 0.3516547000082529, + "grad_norm": 2.9013527520051676, + "learning_rate": 2.2571647019089167e-06, + "loss": 0.6006, + "step": 8522 + }, + { + "epoch": 0.3516959643476108, + "grad_norm": 3.162294852296382, + "learning_rate": 2.2569916342174474e-06, + "loss": 0.5368, + "step": 8523 + }, + { + "epoch": 0.3517372286869687, + "grad_norm": 3.2361229851695374, + "learning_rate": 2.2568185530042144e-06, + "loss": 0.5604, + "step": 8524 + }, + { + "epoch": 0.35177849302632663, + "grad_norm": 3.353942778397752, + "learning_rate": 2.2566454582723085e-06, + "loss": 0.5036, + "step": 8525 + }, + { + "epoch": 0.35181975736568455, + "grad_norm": 4.723319556794843, + "learning_rate": 2.256472350024822e-06, + "loss": 0.5528, + "step": 8526 + }, + { + "epoch": 0.3518610217050425, + "grad_norm": 3.2092216417986648, + "learning_rate": 2.256299228264847e-06, + "loss": 0.5081, + "step": 8527 + }, + { + "epoch": 0.35190228604440044, + "grad_norm": 4.675877551745885, + "learning_rate": 2.256126092995476e-06, + "loss": 0.4402, + "step": 8528 + }, + { + "epoch": 0.35194355038375835, + "grad_norm": 3.6119252750341473, + "learning_rate": 2.255952944219801e-06, + "loss": 0.4835, + "step": 8529 + }, + { + "epoch": 0.35198481472311627, + "grad_norm": 3.9003503238729627, + "learning_rate": 2.255779781940916e-06, + "loss": 0.5798, + "step": 8530 + }, + { + "epoch": 0.3520260790624742, + "grad_norm": 4.035651402101692, + "learning_rate": 2.255606606161912e-06, + "loss": 0.5147, + "step": 8531 + }, + { + "epoch": 0.35206734340183216, + "grad_norm": 3.2375213435487282, + "learning_rate": 2.255433416885885e-06, + "loss": 0.5296, + "step": 8532 + }, + { + "epoch": 0.35210860774119007, + "grad_norm": 3.7352668276410435, + "learning_rate": 2.2552602141159268e-06, + "loss": 0.5023, + "step": 8533 + }, + { + "epoch": 0.352149872080548, + "grad_norm": 5.680986174353561, + "learning_rate": 2.2550869978551327e-06, + "loss": 0.5558, + "step": 8534 + }, + { + "epoch": 0.3521911364199059, + "grad_norm": 4.023909646280398, + "learning_rate": 2.254913768106595e-06, + "loss": 0.5472, + "step": 8535 + }, + { + "epoch": 
0.3522324007592638, + "grad_norm": 2.5937690390362826, + "learning_rate": 2.2547405248734094e-06, + "loss": 0.5334, + "step": 8536 + }, + { + "epoch": 0.3522736650986218, + "grad_norm": 2.8034475530119796, + "learning_rate": 2.2545672681586694e-06, + "loss": 0.4424, + "step": 8537 + }, + { + "epoch": 0.3523149294379797, + "grad_norm": 2.9908717343232682, + "learning_rate": 2.2543939979654707e-06, + "loss": 0.5499, + "step": 8538 + }, + { + "epoch": 0.3523561937773376, + "grad_norm": 10.247914400846433, + "learning_rate": 2.254220714296908e-06, + "loss": 0.567, + "step": 8539 + }, + { + "epoch": 0.35239745811669554, + "grad_norm": 3.377975876129982, + "learning_rate": 2.2540474171560767e-06, + "loss": 0.5306, + "step": 8540 + }, + { + "epoch": 0.35243872245605345, + "grad_norm": 2.332277533276763, + "learning_rate": 2.253874106546072e-06, + "loss": 0.5695, + "step": 8541 + }, + { + "epoch": 0.3524799867954114, + "grad_norm": 2.9681670385169134, + "learning_rate": 2.25370078246999e-06, + "loss": 0.49, + "step": 8542 + }, + { + "epoch": 0.35252125113476934, + "grad_norm": 21.842341197654996, + "learning_rate": 2.2535274449309258e-06, + "loss": 0.5096, + "step": 8543 + }, + { + "epoch": 0.35256251547412726, + "grad_norm": 4.2987853616005625, + "learning_rate": 2.2533540939319773e-06, + "loss": 0.4705, + "step": 8544 + }, + { + "epoch": 0.3526037798134852, + "grad_norm": 3.3350340807815364, + "learning_rate": 2.2531807294762398e-06, + "loss": 0.552, + "step": 8545 + }, + { + "epoch": 0.3526450441528431, + "grad_norm": 2.898742028520678, + "learning_rate": 2.2530073515668103e-06, + "loss": 0.4933, + "step": 8546 + }, + { + "epoch": 0.35268630849220106, + "grad_norm": 14.622255863262934, + "learning_rate": 2.252833960206786e-06, + "loss": 0.5069, + "step": 8547 + }, + { + "epoch": 0.352727572831559, + "grad_norm": 5.209250580539723, + "learning_rate": 2.252660555399263e-06, + "loss": 0.5043, + "step": 8548 + }, + { + "epoch": 0.3527688371709169, + "grad_norm": 3.1160446128927317, + "learning_rate": 2.2524871371473404e-06, + "loss": 0.4898, + "step": 8549 + }, + { + "epoch": 0.3528101015102748, + "grad_norm": 3.0985489449889414, + "learning_rate": 2.2523137054541148e-06, + "loss": 0.5907, + "step": 8550 + }, + { + "epoch": 0.3528513658496327, + "grad_norm": 3.913084032555276, + "learning_rate": 2.2521402603226848e-06, + "loss": 0.4882, + "step": 8551 + }, + { + "epoch": 0.3528926301889907, + "grad_norm": 4.453418135845988, + "learning_rate": 2.251966801756148e-06, + "loss": 0.5642, + "step": 8552 + }, + { + "epoch": 0.3529338945283486, + "grad_norm": 4.192052678094718, + "learning_rate": 2.251793329757603e-06, + "loss": 0.5808, + "step": 8553 + }, + { + "epoch": 0.35297515886770653, + "grad_norm": 3.551415745023003, + "learning_rate": 2.251619844330148e-06, + "loss": 0.4955, + "step": 8554 + }, + { + "epoch": 0.35301642320706444, + "grad_norm": 3.0307305029697895, + "learning_rate": 2.251446345476883e-06, + "loss": 0.5575, + "step": 8555 + }, + { + "epoch": 0.35305768754642236, + "grad_norm": 5.674488092890907, + "learning_rate": 2.2512728332009056e-06, + "loss": 0.5489, + "step": 8556 + }, + { + "epoch": 0.35309895188578033, + "grad_norm": 2.5936882588283745, + "learning_rate": 2.251099307505317e-06, + "loss": 0.4928, + "step": 8557 + }, + { + "epoch": 0.35314021622513825, + "grad_norm": 7.629682185695638, + "learning_rate": 2.2509257683932154e-06, + "loss": 0.5052, + "step": 8558 + }, + { + "epoch": 0.35318148056449616, + "grad_norm": 4.561323777059661, + "learning_rate": 2.2507522158677007e-06, 
+ "loss": 0.5427, + "step": 8559 + }, + { + "epoch": 0.3532227449038541, + "grad_norm": 2.578066535930844, + "learning_rate": 2.250578649931873e-06, + "loss": 0.5292, + "step": 8560 + }, + { + "epoch": 0.353264009243212, + "grad_norm": 3.108763729187223, + "learning_rate": 2.250405070588834e-06, + "loss": 0.5304, + "step": 8561 + }, + { + "epoch": 0.35330527358256997, + "grad_norm": 6.592282098590861, + "learning_rate": 2.2502314778416825e-06, + "loss": 0.5309, + "step": 8562 + }, + { + "epoch": 0.3533465379219279, + "grad_norm": 3.706434905080166, + "learning_rate": 2.2500578716935205e-06, + "loss": 0.5507, + "step": 8563 + }, + { + "epoch": 0.3533878022612858, + "grad_norm": 4.076554173332649, + "learning_rate": 2.2498842521474484e-06, + "loss": 0.533, + "step": 8564 + }, + { + "epoch": 0.3534290666006437, + "grad_norm": 479.134183546413, + "learning_rate": 2.2497106192065675e-06, + "loss": 0.5134, + "step": 8565 + }, + { + "epoch": 0.35347033094000163, + "grad_norm": 4.7340031004169845, + "learning_rate": 2.2495369728739797e-06, + "loss": 0.5697, + "step": 8566 + }, + { + "epoch": 0.3535115952793596, + "grad_norm": 2.3003514463428067, + "learning_rate": 2.2493633131527868e-06, + "loss": 0.6012, + "step": 8567 + }, + { + "epoch": 0.3535528596187175, + "grad_norm": 3.994043546645609, + "learning_rate": 2.2491896400460893e-06, + "loss": 0.5266, + "step": 8568 + }, + { + "epoch": 0.35359412395807543, + "grad_norm": 3.350940744303231, + "learning_rate": 2.2490159535569916e-06, + "loss": 0.5192, + "step": 8569 + }, + { + "epoch": 0.35363538829743335, + "grad_norm": 2.3461459945586367, + "learning_rate": 2.248842253688595e-06, + "loss": 0.5582, + "step": 8570 + }, + { + "epoch": 0.35367665263679127, + "grad_norm": 3.568622263402395, + "learning_rate": 2.248668540444003e-06, + "loss": 0.4982, + "step": 8571 + }, + { + "epoch": 0.35371791697614924, + "grad_norm": 4.696712030424371, + "learning_rate": 2.2484948138263165e-06, + "loss": 0.5461, + "step": 8572 + }, + { + "epoch": 0.35375918131550715, + "grad_norm": 2.8991230139278845, + "learning_rate": 2.2483210738386412e-06, + "loss": 0.5375, + "step": 8573 + }, + { + "epoch": 0.35380044565486507, + "grad_norm": 6.8661995720445965, + "learning_rate": 2.2481473204840796e-06, + "loss": 0.5577, + "step": 8574 + }, + { + "epoch": 0.353841709994223, + "grad_norm": 3.7549150349706206, + "learning_rate": 2.247973553765735e-06, + "loss": 0.5395, + "step": 8575 + }, + { + "epoch": 0.3538829743335809, + "grad_norm": 3.8516471524578253, + "learning_rate": 2.2477997736867114e-06, + "loss": 0.5622, + "step": 8576 + }, + { + "epoch": 0.3539242386729389, + "grad_norm": 3.8640451400509104, + "learning_rate": 2.247625980250114e-06, + "loss": 0.5369, + "step": 8577 + }, + { + "epoch": 0.3539655030122968, + "grad_norm": 2.0725211765939733, + "learning_rate": 2.2474521734590454e-06, + "loss": 0.5465, + "step": 8578 + }, + { + "epoch": 0.3540067673516547, + "grad_norm": 5.359144609511622, + "learning_rate": 2.2472783533166114e-06, + "loss": 0.5495, + "step": 8579 + }, + { + "epoch": 0.3540480316910126, + "grad_norm": 7.093556044790334, + "learning_rate": 2.247104519825917e-06, + "loss": 0.5749, + "step": 8580 + }, + { + "epoch": 0.35408929603037054, + "grad_norm": 3.881255967882388, + "learning_rate": 2.2469306729900667e-06, + "loss": 0.5235, + "step": 8581 + }, + { + "epoch": 0.35413056036972845, + "grad_norm": 3.2610798299706105, + "learning_rate": 2.2467568128121655e-06, + "loss": 0.4806, + "step": 8582 + }, + { + "epoch": 0.3541718247090864, + "grad_norm": 
9.53472914906036, + "learning_rate": 2.24658293929532e-06, + "loss": 0.4853, + "step": 8583 + }, + { + "epoch": 0.35421308904844434, + "grad_norm": 3.0513114718193677, + "learning_rate": 2.2464090524426353e-06, + "loss": 0.5489, + "step": 8584 + }, + { + "epoch": 0.35425435338780226, + "grad_norm": 3.3096239646665757, + "learning_rate": 2.2462351522572183e-06, + "loss": 0.529, + "step": 8585 + }, + { + "epoch": 0.3542956177271602, + "grad_norm": 2.353082882598605, + "learning_rate": 2.246061238742174e-06, + "loss": 0.553, + "step": 8586 + }, + { + "epoch": 0.3543368820665181, + "grad_norm": 3.389395577357209, + "learning_rate": 2.2458873119006097e-06, + "loss": 0.5518, + "step": 8587 + }, + { + "epoch": 0.35437814640587606, + "grad_norm": 2.8691130185987794, + "learning_rate": 2.2457133717356327e-06, + "loss": 0.5975, + "step": 8588 + }, + { + "epoch": 0.354419410745234, + "grad_norm": 3.047059730638267, + "learning_rate": 2.245539418250348e-06, + "loss": 0.5578, + "step": 8589 + }, + { + "epoch": 0.3544606750845919, + "grad_norm": 5.885131854715832, + "learning_rate": 2.2453654514478656e-06, + "loss": 0.5629, + "step": 8590 + }, + { + "epoch": 0.3545019394239498, + "grad_norm": 2.851317801981692, + "learning_rate": 2.2451914713312917e-06, + "loss": 0.4791, + "step": 8591 + }, + { + "epoch": 0.3545432037633077, + "grad_norm": 2.8889263654690565, + "learning_rate": 2.2450174779037328e-06, + "loss": 0.4847, + "step": 8592 + }, + { + "epoch": 0.3545844681026657, + "grad_norm": 3.7397312288899895, + "learning_rate": 2.244843471168299e-06, + "loss": 0.4892, + "step": 8593 + }, + { + "epoch": 0.3546257324420236, + "grad_norm": 2.9893632745399503, + "learning_rate": 2.2446694511280973e-06, + "loss": 0.5629, + "step": 8594 + }, + { + "epoch": 0.3546669967813815, + "grad_norm": 3.4760944376994223, + "learning_rate": 2.244495417786236e-06, + "loss": 0.5297, + "step": 8595 + }, + { + "epoch": 0.35470826112073944, + "grad_norm": 3.7063198622446984, + "learning_rate": 2.2443213711458243e-06, + "loss": 0.5534, + "step": 8596 + }, + { + "epoch": 0.35474952546009736, + "grad_norm": 3.334381680591976, + "learning_rate": 2.2441473112099706e-06, + "loss": 0.5367, + "step": 8597 + }, + { + "epoch": 0.35479078979945533, + "grad_norm": 3.26445445751994, + "learning_rate": 2.2439732379817847e-06, + "loss": 0.5293, + "step": 8598 + }, + { + "epoch": 0.35483205413881325, + "grad_norm": 4.074888474860607, + "learning_rate": 2.2437991514643757e-06, + "loss": 0.5647, + "step": 8599 + }, + { + "epoch": 0.35487331847817116, + "grad_norm": 14.420641740740873, + "learning_rate": 2.2436250516608533e-06, + "loss": 0.4864, + "step": 8600 + }, + { + "epoch": 0.3549145828175291, + "grad_norm": 2.349942279421466, + "learning_rate": 2.2434509385743268e-06, + "loss": 0.539, + "step": 8601 + }, + { + "epoch": 0.354955847156887, + "grad_norm": 14.077764204284287, + "learning_rate": 2.2432768122079067e-06, + "loss": 0.4649, + "step": 8602 + }, + { + "epoch": 0.35499711149624497, + "grad_norm": 2.4857174902239385, + "learning_rate": 2.2431026725647034e-06, + "loss": 0.5187, + "step": 8603 + }, + { + "epoch": 0.3550383758356029, + "grad_norm": 4.236738188911601, + "learning_rate": 2.2429285196478277e-06, + "loss": 0.5593, + "step": 8604 + }, + { + "epoch": 0.3550796401749608, + "grad_norm": 2.6935169878040894, + "learning_rate": 2.2427543534603894e-06, + "loss": 0.506, + "step": 8605 + }, + { + "epoch": 0.3551209045143187, + "grad_norm": 2.6856698548293534, + "learning_rate": 2.242580174005501e-06, + "loss": 0.526, + "step": 8606 + 
}, + { + "epoch": 0.35516216885367663, + "grad_norm": 3.165881323561553, + "learning_rate": 2.242405981286273e-06, + "loss": 0.5375, + "step": 8607 + }, + { + "epoch": 0.3552034331930346, + "grad_norm": 2.1233646167240727, + "learning_rate": 2.2422317753058167e-06, + "loss": 0.5669, + "step": 8608 + }, + { + "epoch": 0.3552446975323925, + "grad_norm": 3.5558888818112537, + "learning_rate": 2.2420575560672437e-06, + "loss": 0.5484, + "step": 8609 + }, + { + "epoch": 0.35528596187175043, + "grad_norm": 3.194359333565739, + "learning_rate": 2.2418833235736673e-06, + "loss": 0.5037, + "step": 8610 + }, + { + "epoch": 0.35532722621110835, + "grad_norm": 7.7941845909270935, + "learning_rate": 2.241709077828198e-06, + "loss": 0.5653, + "step": 8611 + }, + { + "epoch": 0.35536849055046626, + "grad_norm": 7.495494841078905, + "learning_rate": 2.2415348188339496e-06, + "loss": 0.5719, + "step": 8612 + }, + { + "epoch": 0.35540975488982424, + "grad_norm": 6.835318209979054, + "learning_rate": 2.2413605465940344e-06, + "loss": 0.5724, + "step": 8613 + }, + { + "epoch": 0.35545101922918215, + "grad_norm": 2.525993501805656, + "learning_rate": 2.241186261111565e-06, + "loss": 0.5527, + "step": 8614 + }, + { + "epoch": 0.35549228356854007, + "grad_norm": 4.075309292467443, + "learning_rate": 2.241011962389655e-06, + "loss": 0.5841, + "step": 8615 + }, + { + "epoch": 0.355533547907898, + "grad_norm": 10.023224618148147, + "learning_rate": 2.2408376504314174e-06, + "loss": 0.5237, + "step": 8616 + }, + { + "epoch": 0.3555748122472559, + "grad_norm": 3.9213843396419805, + "learning_rate": 2.240663325239966e-06, + "loss": 0.5687, + "step": 8617 + }, + { + "epoch": 0.35561607658661387, + "grad_norm": 4.89262961649147, + "learning_rate": 2.240488986818415e-06, + "loss": 0.5716, + "step": 8618 + }, + { + "epoch": 0.3556573409259718, + "grad_norm": 3.7451486917095402, + "learning_rate": 2.2403146351698778e-06, + "loss": 0.5552, + "step": 8619 + }, + { + "epoch": 0.3556986052653297, + "grad_norm": 3.4572097833537336, + "learning_rate": 2.24014027029747e-06, + "loss": 0.5401, + "step": 8620 + }, + { + "epoch": 0.3557398696046876, + "grad_norm": 4.089738611107249, + "learning_rate": 2.239965892204305e-06, + "loss": 0.5616, + "step": 8621 + }, + { + "epoch": 0.35578113394404554, + "grad_norm": 10.82545303240914, + "learning_rate": 2.239791500893498e-06, + "loss": 0.6131, + "step": 8622 + }, + { + "epoch": 0.3558223982834035, + "grad_norm": 3.5835404229523173, + "learning_rate": 2.239617096368164e-06, + "loss": 0.5817, + "step": 8623 + }, + { + "epoch": 0.3558636626227614, + "grad_norm": 5.790469769355042, + "learning_rate": 2.2394426786314187e-06, + "loss": 0.546, + "step": 8624 + }, + { + "epoch": 0.35590492696211934, + "grad_norm": 3.5754272022368117, + "learning_rate": 2.239268247686377e-06, + "loss": 0.5174, + "step": 8625 + }, + { + "epoch": 0.35594619130147725, + "grad_norm": 10.316141751943087, + "learning_rate": 2.2390938035361557e-06, + "loss": 0.5662, + "step": 8626 + }, + { + "epoch": 0.35598745564083517, + "grad_norm": 4.9386701867125025, + "learning_rate": 2.23891934618387e-06, + "loss": 0.4724, + "step": 8627 + }, + { + "epoch": 0.35602871998019314, + "grad_norm": 2.643112798843972, + "learning_rate": 2.238744875632636e-06, + "loss": 0.5449, + "step": 8628 + }, + { + "epoch": 0.35606998431955106, + "grad_norm": 2.8022049939596676, + "learning_rate": 2.2385703918855703e-06, + "loss": 0.5317, + "step": 8629 + }, + { + "epoch": 0.356111248658909, + "grad_norm": 5.358323658297803, + "learning_rate": 
2.23839589494579e-06, + "loss": 0.5544, + "step": 8630 + }, + { + "epoch": 0.3561525129982669, + "grad_norm": 3.8239389540350257, + "learning_rate": 2.2382213848164115e-06, + "loss": 0.5593, + "step": 8631 + }, + { + "epoch": 0.3561937773376248, + "grad_norm": 8.595022456886289, + "learning_rate": 2.238046861500553e-06, + "loss": 0.5634, + "step": 8632 + }, + { + "epoch": 0.3562350416769828, + "grad_norm": 38.381695997303254, + "learning_rate": 2.237872325001331e-06, + "loss": 0.5798, + "step": 8633 + }, + { + "epoch": 0.3562763060163407, + "grad_norm": 2.488579900745787, + "learning_rate": 2.237697775321863e-06, + "loss": 0.5091, + "step": 8634 + }, + { + "epoch": 0.3563175703556986, + "grad_norm": 2.8666444003140414, + "learning_rate": 2.2375232124652677e-06, + "loss": 0.5289, + "step": 8635 + }, + { + "epoch": 0.3563588346950565, + "grad_norm": 3.3687349063252032, + "learning_rate": 2.2373486364346626e-06, + "loss": 0.5324, + "step": 8636 + }, + { + "epoch": 0.35640009903441444, + "grad_norm": 3.231400023254109, + "learning_rate": 2.2371740472331665e-06, + "loss": 0.548, + "step": 8637 + }, + { + "epoch": 0.3564413633737724, + "grad_norm": 4.408339235501237, + "learning_rate": 2.2369994448638974e-06, + "loss": 0.5665, + "step": 8638 + }, + { + "epoch": 0.35648262771313033, + "grad_norm": 3.9761066531162736, + "learning_rate": 2.2368248293299753e-06, + "loss": 0.5685, + "step": 8639 + }, + { + "epoch": 0.35652389205248824, + "grad_norm": 2.9632063654653296, + "learning_rate": 2.2366502006345173e-06, + "loss": 0.4888, + "step": 8640 + }, + { + "epoch": 0.35656515639184616, + "grad_norm": 15.829797258012443, + "learning_rate": 2.236475558780645e-06, + "loss": 0.5377, + "step": 8641 + }, + { + "epoch": 0.3566064207312041, + "grad_norm": 2.49671877821101, + "learning_rate": 2.2363009037714764e-06, + "loss": 0.5528, + "step": 8642 + }, + { + "epoch": 0.356647685070562, + "grad_norm": 4.943808904874867, + "learning_rate": 2.236126235610132e-06, + "loss": 0.5357, + "step": 8643 + }, + { + "epoch": 0.35668894940991996, + "grad_norm": 3.170373940063131, + "learning_rate": 2.2359515542997307e-06, + "loss": 0.5228, + "step": 8644 + }, + { + "epoch": 0.3567302137492779, + "grad_norm": 5.950970931630001, + "learning_rate": 2.2357768598433944e-06, + "loss": 0.5501, + "step": 8645 + }, + { + "epoch": 0.3567714780886358, + "grad_norm": 5.119745241702349, + "learning_rate": 2.2356021522442424e-06, + "loss": 0.5208, + "step": 8646 + }, + { + "epoch": 0.3568127424279937, + "grad_norm": 2.9667790518207333, + "learning_rate": 2.235427431505396e-06, + "loss": 0.5899, + "step": 8647 + }, + { + "epoch": 0.35685400676735163, + "grad_norm": 2.9577717228051457, + "learning_rate": 2.235252697629976e-06, + "loss": 0.5171, + "step": 8648 + }, + { + "epoch": 0.3568952711067096, + "grad_norm": 3.127496042749394, + "learning_rate": 2.235077950621103e-06, + "loss": 0.5152, + "step": 8649 + }, + { + "epoch": 0.3569365354460675, + "grad_norm": 4.46743895919295, + "learning_rate": 2.234903190481899e-06, + "loss": 0.5555, + "step": 8650 + }, + { + "epoch": 0.35697779978542543, + "grad_norm": 8.959732567962636, + "learning_rate": 2.234728417215486e-06, + "loss": 0.5831, + "step": 8651 + }, + { + "epoch": 0.35701906412478335, + "grad_norm": 4.769324414442807, + "learning_rate": 2.234553630824985e-06, + "loss": 0.5068, + "step": 8652 + }, + { + "epoch": 0.35706032846414126, + "grad_norm": 11.630862508289596, + "learning_rate": 2.234378831313519e-06, + "loss": 0.5427, + "step": 8653 + }, + { + "epoch": 0.35710159280349923, + 
"grad_norm": 4.105445563325742, + "learning_rate": 2.23420401868421e-06, + "loss": 0.4865, + "step": 8654 + }, + { + "epoch": 0.35714285714285715, + "grad_norm": 4.086650876398245, + "learning_rate": 2.2340291929401805e-06, + "loss": 0.4957, + "step": 8655 + }, + { + "epoch": 0.35718412148221507, + "grad_norm": 2.1752913897359214, + "learning_rate": 2.2338543540845527e-06, + "loss": 0.5335, + "step": 8656 + }, + { + "epoch": 0.357225385821573, + "grad_norm": 2.1666027602443436, + "learning_rate": 2.233679502120451e-06, + "loss": 0.5402, + "step": 8657 + }, + { + "epoch": 0.3572666501609309, + "grad_norm": 4.462512577094962, + "learning_rate": 2.2335046370509974e-06, + "loss": 0.5105, + "step": 8658 + }, + { + "epoch": 0.35730791450028887, + "grad_norm": 4.969301755126732, + "learning_rate": 2.2333297588793168e-06, + "loss": 0.5767, + "step": 8659 + }, + { + "epoch": 0.3573491788396468, + "grad_norm": 2.145245548316094, + "learning_rate": 2.2331548676085317e-06, + "loss": 0.5193, + "step": 8660 + }, + { + "epoch": 0.3573904431790047, + "grad_norm": 2.8310182035380738, + "learning_rate": 2.232979963241767e-06, + "loss": 0.5934, + "step": 8661 + }, + { + "epoch": 0.3574317075183626, + "grad_norm": 2.980999701475464, + "learning_rate": 2.2328050457821456e-06, + "loss": 0.4752, + "step": 8662 + }, + { + "epoch": 0.35747297185772053, + "grad_norm": 2.8187820419752487, + "learning_rate": 2.232630115232794e-06, + "loss": 0.6213, + "step": 8663 + }, + { + "epoch": 0.3575142361970785, + "grad_norm": 3.7827026109201247, + "learning_rate": 2.2324551715968346e-06, + "loss": 0.5476, + "step": 8664 + }, + { + "epoch": 0.3575555005364364, + "grad_norm": 2.888576520274723, + "learning_rate": 2.2322802148773942e-06, + "loss": 0.5671, + "step": 8665 + }, + { + "epoch": 0.35759676487579434, + "grad_norm": 3.5941517278085504, + "learning_rate": 2.2321052450775976e-06, + "loss": 0.5524, + "step": 8666 + }, + { + "epoch": 0.35763802921515225, + "grad_norm": 2.890539534750378, + "learning_rate": 2.2319302622005694e-06, + "loss": 0.5512, + "step": 8667 + }, + { + "epoch": 0.35767929355451017, + "grad_norm": 3.3529301951936885, + "learning_rate": 2.231755266249436e-06, + "loss": 0.5444, + "step": 8668 + }, + { + "epoch": 0.35772055789386814, + "grad_norm": 2.791540412757556, + "learning_rate": 2.2315802572273227e-06, + "loss": 0.5421, + "step": 8669 + }, + { + "epoch": 0.35776182223322606, + "grad_norm": 2.7148774849865482, + "learning_rate": 2.231405235137356e-06, + "loss": 0.5056, + "step": 8670 + }, + { + "epoch": 0.357803086572584, + "grad_norm": 3.25746765558568, + "learning_rate": 2.2312301999826623e-06, + "loss": 0.5399, + "step": 8671 + }, + { + "epoch": 0.3578443509119419, + "grad_norm": 227.51942336644348, + "learning_rate": 2.2310551517663676e-06, + "loss": 0.5785, + "step": 8672 + }, + { + "epoch": 0.3578856152512998, + "grad_norm": 2.4987211958599373, + "learning_rate": 2.2308800904915994e-06, + "loss": 0.5472, + "step": 8673 + }, + { + "epoch": 0.3579268795906578, + "grad_norm": 5.767893639251174, + "learning_rate": 2.230705016161484e-06, + "loss": 0.5865, + "step": 8674 + }, + { + "epoch": 0.3579681439300157, + "grad_norm": 2.313310973806204, + "learning_rate": 2.230529928779149e-06, + "loss": 0.5472, + "step": 8675 + }, + { + "epoch": 0.3580094082693736, + "grad_norm": 3.2306908789077093, + "learning_rate": 2.2303548283477226e-06, + "loss": 0.5302, + "step": 8676 + }, + { + "epoch": 0.3580506726087315, + "grad_norm": 3.5719190022503797, + "learning_rate": 2.2301797148703316e-06, + "loss": 0.5258, + 
"step": 8677 + }, + { + "epoch": 0.35809193694808944, + "grad_norm": 4.477573100445345, + "learning_rate": 2.230004588350104e-06, + "loss": 0.5274, + "step": 8678 + }, + { + "epoch": 0.3581332012874474, + "grad_norm": 3.9128709000820057, + "learning_rate": 2.2298294487901686e-06, + "loss": 0.6084, + "step": 8679 + }, + { + "epoch": 0.3581744656268053, + "grad_norm": 2.5321233889566295, + "learning_rate": 2.2296542961936536e-06, + "loss": 0.5547, + "step": 8680 + }, + { + "epoch": 0.35821572996616324, + "grad_norm": 4.602581112913656, + "learning_rate": 2.229479130563687e-06, + "loss": 0.5444, + "step": 8681 + }, + { + "epoch": 0.35825699430552116, + "grad_norm": 5.835488748014076, + "learning_rate": 2.2293039519033988e-06, + "loss": 0.5326, + "step": 8682 + }, + { + "epoch": 0.3582982586448791, + "grad_norm": 2.857140107472748, + "learning_rate": 2.2291287602159173e-06, + "loss": 0.5001, + "step": 8683 + }, + { + "epoch": 0.35833952298423705, + "grad_norm": 30.489387300283507, + "learning_rate": 2.2289535555043725e-06, + "loss": 0.6184, + "step": 8684 + }, + { + "epoch": 0.35838078732359496, + "grad_norm": 2.7002217715287085, + "learning_rate": 2.228778337771893e-06, + "loss": 0.4762, + "step": 8685 + }, + { + "epoch": 0.3584220516629529, + "grad_norm": 2.4019424308224777, + "learning_rate": 2.2286031070216096e-06, + "loss": 0.5303, + "step": 8686 + }, + { + "epoch": 0.3584633160023108, + "grad_norm": 2.616346106439867, + "learning_rate": 2.228427863256652e-06, + "loss": 0.5697, + "step": 8687 + }, + { + "epoch": 0.3585045803416687, + "grad_norm": 2.764078449565268, + "learning_rate": 2.228252606480151e-06, + "loss": 0.4928, + "step": 8688 + }, + { + "epoch": 0.3585458446810267, + "grad_norm": 3.950995894165978, + "learning_rate": 2.2280773366952356e-06, + "loss": 0.521, + "step": 8689 + }, + { + "epoch": 0.3585871090203846, + "grad_norm": 6.508103625905636, + "learning_rate": 2.2279020539050386e-06, + "loss": 0.5839, + "step": 8690 + }, + { + "epoch": 0.3586283733597425, + "grad_norm": 6.486242679833972, + "learning_rate": 2.2277267581126894e-06, + "loss": 0.5595, + "step": 8691 + }, + { + "epoch": 0.35866963769910043, + "grad_norm": 4.764044425293341, + "learning_rate": 2.2275514493213203e-06, + "loss": 0.533, + "step": 8692 + }, + { + "epoch": 0.35871090203845835, + "grad_norm": 3.0709578206894514, + "learning_rate": 2.227376127534062e-06, + "loss": 0.5297, + "step": 8693 + }, + { + "epoch": 0.3587521663778163, + "grad_norm": 3.140672584856835, + "learning_rate": 2.2272007927540465e-06, + "loss": 0.5251, + "step": 8694 + }, + { + "epoch": 0.35879343071717423, + "grad_norm": 12.502075428651935, + "learning_rate": 2.227025444984405e-06, + "loss": 0.4999, + "step": 8695 + }, + { + "epoch": 0.35883469505653215, + "grad_norm": 9.360282817290456, + "learning_rate": 2.226850084228271e-06, + "loss": 0.453, + "step": 8696 + }, + { + "epoch": 0.35887595939589007, + "grad_norm": 3.1794036820915625, + "learning_rate": 2.226674710488776e-06, + "loss": 0.5476, + "step": 8697 + }, + { + "epoch": 0.358917223735248, + "grad_norm": 2.711883298466847, + "learning_rate": 2.226499323769053e-06, + "loss": 0.5684, + "step": 8698 + }, + { + "epoch": 0.35895848807460595, + "grad_norm": 2.7666761486875853, + "learning_rate": 2.2263239240722347e-06, + "loss": 0.5449, + "step": 8699 + }, + { + "epoch": 0.35899975241396387, + "grad_norm": 7.50265536129242, + "learning_rate": 2.226148511401454e-06, + "loss": 0.5011, + "step": 8700 + }, + { + "epoch": 0.3590410167533218, + "grad_norm": 5.33306194958686, + 
"learning_rate": 2.2259730857598437e-06, + "loss": 0.5211, + "step": 8701 + }, + { + "epoch": 0.3590822810926797, + "grad_norm": 3.111802765372468, + "learning_rate": 2.225797647150539e-06, + "loss": 0.5539, + "step": 8702 + }, + { + "epoch": 0.3591235454320376, + "grad_norm": 3.699009203556698, + "learning_rate": 2.225622195576672e-06, + "loss": 0.5543, + "step": 8703 + }, + { + "epoch": 0.35916480977139553, + "grad_norm": 2.5616220465996915, + "learning_rate": 2.2254467310413777e-06, + "loss": 0.577, + "step": 8704 + }, + { + "epoch": 0.3592060741107535, + "grad_norm": 2.520480936750761, + "learning_rate": 2.2252712535477897e-06, + "loss": 0.5304, + "step": 8705 + }, + { + "epoch": 0.3592473384501114, + "grad_norm": 6.0656861442900825, + "learning_rate": 2.225095763099043e-06, + "loss": 0.5225, + "step": 8706 + }, + { + "epoch": 0.35928860278946934, + "grad_norm": 9.307562124080585, + "learning_rate": 2.224920259698272e-06, + "loss": 0.5846, + "step": 8707 + }, + { + "epoch": 0.35932986712882725, + "grad_norm": 3.019582547957392, + "learning_rate": 2.2247447433486115e-06, + "loss": 0.4856, + "step": 8708 + }, + { + "epoch": 0.35937113146818517, + "grad_norm": 4.1929209015272635, + "learning_rate": 2.224569214053197e-06, + "loss": 0.5372, + "step": 8709 + }, + { + "epoch": 0.35941239580754314, + "grad_norm": 31.203209871198574, + "learning_rate": 2.224393671815164e-06, + "loss": 0.5019, + "step": 8710 + }, + { + "epoch": 0.35945366014690106, + "grad_norm": 2.6893321799775625, + "learning_rate": 2.2242181166376476e-06, + "loss": 0.5356, + "step": 8711 + }, + { + "epoch": 0.35949492448625897, + "grad_norm": 9.285996646429327, + "learning_rate": 2.224042548523784e-06, + "loss": 0.5473, + "step": 8712 + }, + { + "epoch": 0.3595361888256169, + "grad_norm": 2.4142586652299705, + "learning_rate": 2.2238669674767094e-06, + "loss": 0.5399, + "step": 8713 + }, + { + "epoch": 0.3595774531649748, + "grad_norm": 2.4141051371479656, + "learning_rate": 2.223691373499559e-06, + "loss": 0.5446, + "step": 8714 + }, + { + "epoch": 0.3596187175043328, + "grad_norm": 19.17148589046478, + "learning_rate": 2.2235157665954716e-06, + "loss": 0.5371, + "step": 8715 + }, + { + "epoch": 0.3596599818436907, + "grad_norm": 3.0735635904776064, + "learning_rate": 2.223340146767582e-06, + "loss": 0.527, + "step": 8716 + }, + { + "epoch": 0.3597012461830486, + "grad_norm": 3.0667658194115432, + "learning_rate": 2.2231645140190277e-06, + "loss": 0.5101, + "step": 8717 + }, + { + "epoch": 0.3597425105224065, + "grad_norm": 4.771305219264665, + "learning_rate": 2.2229888683529463e-06, + "loss": 0.5595, + "step": 8718 + }, + { + "epoch": 0.35978377486176444, + "grad_norm": 2.1358825607682888, + "learning_rate": 2.222813209772475e-06, + "loss": 0.5574, + "step": 8719 + }, + { + "epoch": 0.3598250392011224, + "grad_norm": 2.5620678330567115, + "learning_rate": 2.2226375382807517e-06, + "loss": 0.5349, + "step": 8720 + }, + { + "epoch": 0.3598663035404803, + "grad_norm": 12.876647112295506, + "learning_rate": 2.222461853880914e-06, + "loss": 0.5268, + "step": 8721 + }, + { + "epoch": 0.35990756787983824, + "grad_norm": 3.0884919930384114, + "learning_rate": 2.2222861565761e-06, + "loss": 0.5672, + "step": 8722 + }, + { + "epoch": 0.35994883221919616, + "grad_norm": 2.367721030451192, + "learning_rate": 2.2221104463694494e-06, + "loss": 0.5282, + "step": 8723 + }, + { + "epoch": 0.3599900965585541, + "grad_norm": 3.215785889661267, + "learning_rate": 2.2219347232640987e-06, + "loss": 0.5308, + "step": 8724 + }, + { + "epoch": 
0.36003136089791204, + "grad_norm": 2.6511895105829724, + "learning_rate": 2.2217589872631884e-06, + "loss": 0.4755, + "step": 8725 + }, + { + "epoch": 0.36007262523726996, + "grad_norm": 2.5508061786604936, + "learning_rate": 2.221583238369857e-06, + "loss": 0.5847, + "step": 8726 + }, + { + "epoch": 0.3601138895766279, + "grad_norm": 5.586147368423746, + "learning_rate": 2.2214074765872437e-06, + "loss": 0.5464, + "step": 8727 + }, + { + "epoch": 0.3601551539159858, + "grad_norm": 3.1029439752634995, + "learning_rate": 2.2212317019184884e-06, + "loss": 0.5899, + "step": 8728 + }, + { + "epoch": 0.3601964182553437, + "grad_norm": 2.8800595861635614, + "learning_rate": 2.2210559143667303e-06, + "loss": 0.5669, + "step": 8729 + }, + { + "epoch": 0.3602376825947017, + "grad_norm": 3.878001464343028, + "learning_rate": 2.22088011393511e-06, + "loss": 0.5132, + "step": 8730 + }, + { + "epoch": 0.3602789469340596, + "grad_norm": 4.198727389383711, + "learning_rate": 2.2207043006267676e-06, + "loss": 0.6035, + "step": 8731 + }, + { + "epoch": 0.3603202112734175, + "grad_norm": 4.006026034775067, + "learning_rate": 2.220528474444843e-06, + "loss": 0.5653, + "step": 8732 + }, + { + "epoch": 0.36036147561277543, + "grad_norm": 3.3882647957957404, + "learning_rate": 2.220352635392478e-06, + "loss": 0.5448, + "step": 8733 + }, + { + "epoch": 0.36040273995213334, + "grad_norm": 3.2173175125882514, + "learning_rate": 2.220176783472812e-06, + "loss": 0.5504, + "step": 8734 + }, + { + "epoch": 0.3604440042914913, + "grad_norm": 2.0005665571752065, + "learning_rate": 2.220000918688988e-06, + "loss": 0.5265, + "step": 8735 + }, + { + "epoch": 0.36048526863084923, + "grad_norm": 3.1079385637118424, + "learning_rate": 2.2198250410441457e-06, + "loss": 0.5386, + "step": 8736 + }, + { + "epoch": 0.36052653297020715, + "grad_norm": 3.082545010972549, + "learning_rate": 2.219649150541428e-06, + "loss": 0.5485, + "step": 8737 + }, + { + "epoch": 0.36056779730956506, + "grad_norm": 3.4474273160198106, + "learning_rate": 2.219473247183976e-06, + "loss": 0.5818, + "step": 8738 + }, + { + "epoch": 0.360609061648923, + "grad_norm": 3.241075552304767, + "learning_rate": 2.219297330974932e-06, + "loss": 0.5368, + "step": 8739 + }, + { + "epoch": 0.36065032598828095, + "grad_norm": 4.6315113797063585, + "learning_rate": 2.2191214019174376e-06, + "loss": 0.5542, + "step": 8740 + }, + { + "epoch": 0.36069159032763887, + "grad_norm": 3.996239605338426, + "learning_rate": 2.218945460014637e-06, + "loss": 0.5505, + "step": 8741 + }, + { + "epoch": 0.3607328546669968, + "grad_norm": 3.694255408531014, + "learning_rate": 2.2187695052696712e-06, + "loss": 0.5726, + "step": 8742 + }, + { + "epoch": 0.3607741190063547, + "grad_norm": 2.4378364004310025, + "learning_rate": 2.2185935376856846e-06, + "loss": 0.5247, + "step": 8743 + }, + { + "epoch": 0.3608153833457126, + "grad_norm": 5.913815912852329, + "learning_rate": 2.218417557265819e-06, + "loss": 0.5051, + "step": 8744 + }, + { + "epoch": 0.3608566476850706, + "grad_norm": 3.430741406563403, + "learning_rate": 2.218241564013219e-06, + "loss": 0.5466, + "step": 8745 + }, + { + "epoch": 0.3608979120244285, + "grad_norm": 2.1511679136589734, + "learning_rate": 2.2180655579310284e-06, + "loss": 0.4923, + "step": 8746 + }, + { + "epoch": 0.3609391763637864, + "grad_norm": 2.682660888715343, + "learning_rate": 2.2178895390223903e-06, + "loss": 0.5712, + "step": 8747 + }, + { + "epoch": 0.36098044070314433, + "grad_norm": 6.0736924968668475, + "learning_rate": 2.217713507290449e-06, 
+ "loss": 0.5386, + "step": 8748 + }, + { + "epoch": 0.36102170504250225, + "grad_norm": 2.1019494365592784, + "learning_rate": 2.217537462738349e-06, + "loss": 0.5139, + "step": 8749 + }, + { + "epoch": 0.3610629693818602, + "grad_norm": 3.781771922274886, + "learning_rate": 2.217361405369234e-06, + "loss": 0.5658, + "step": 8750 + }, + { + "epoch": 0.36110423372121814, + "grad_norm": 3.5087837825730435, + "learning_rate": 2.2171853351862515e-06, + "loss": 0.5434, + "step": 8751 + }, + { + "epoch": 0.36114549806057605, + "grad_norm": 4.532033436649531, + "learning_rate": 2.217009252192544e-06, + "loss": 0.5967, + "step": 8752 + }, + { + "epoch": 0.36118676239993397, + "grad_norm": 3.012259210029878, + "learning_rate": 2.2168331563912573e-06, + "loss": 0.4908, + "step": 8753 + }, + { + "epoch": 0.3612280267392919, + "grad_norm": 7.1358377319284365, + "learning_rate": 2.2166570477855375e-06, + "loss": 0.5171, + "step": 8754 + }, + { + "epoch": 0.36126929107864986, + "grad_norm": 2.3653555631120216, + "learning_rate": 2.2164809263785296e-06, + "loss": 0.5419, + "step": 8755 + }, + { + "epoch": 0.3613105554180078, + "grad_norm": 4.141102087034494, + "learning_rate": 2.2163047921733807e-06, + "loss": 0.5568, + "step": 8756 + }, + { + "epoch": 0.3613518197573657, + "grad_norm": 3.2748877253077313, + "learning_rate": 2.216128645173236e-06, + "loss": 0.5605, + "step": 8757 + }, + { + "epoch": 0.3613930840967236, + "grad_norm": 2.84094703298982, + "learning_rate": 2.2159524853812423e-06, + "loss": 0.5617, + "step": 8758 + }, + { + "epoch": 0.3614343484360815, + "grad_norm": 2.8886373040941864, + "learning_rate": 2.2157763128005457e-06, + "loss": 0.4861, + "step": 8759 + }, + { + "epoch": 0.3614756127754395, + "grad_norm": 5.0870190133178195, + "learning_rate": 2.215600127434294e-06, + "loss": 0.5297, + "step": 8760 + }, + { + "epoch": 0.3615168771147974, + "grad_norm": 2.6464984960313864, + "learning_rate": 2.2154239292856334e-06, + "loss": 0.5445, + "step": 8761 + }, + { + "epoch": 0.3615581414541553, + "grad_norm": 3.2973374867410157, + "learning_rate": 2.2152477183577123e-06, + "loss": 0.5413, + "step": 8762 + }, + { + "epoch": 0.36159940579351324, + "grad_norm": 3.5037030234232844, + "learning_rate": 2.2150714946536773e-06, + "loss": 0.5412, + "step": 8763 + }, + { + "epoch": 0.36164067013287116, + "grad_norm": 4.065799069313475, + "learning_rate": 2.2148952581766772e-06, + "loss": 0.5734, + "step": 8764 + }, + { + "epoch": 0.36168193447222907, + "grad_norm": 3.519917191282597, + "learning_rate": 2.214719008929859e-06, + "loss": 0.5545, + "step": 8765 + }, + { + "epoch": 0.36172319881158704, + "grad_norm": 3.550282897929203, + "learning_rate": 2.214542746916371e-06, + "loss": 0.5316, + "step": 8766 + }, + { + "epoch": 0.36176446315094496, + "grad_norm": 7.585719703219768, + "learning_rate": 2.214366472139362e-06, + "loss": 0.5665, + "step": 8767 + }, + { + "epoch": 0.3618057274903029, + "grad_norm": 2.8842806440725317, + "learning_rate": 2.214190184601981e-06, + "loss": 0.5845, + "step": 8768 + }, + { + "epoch": 0.3618469918296608, + "grad_norm": 4.4466397716500135, + "learning_rate": 2.2140138843073767e-06, + "loss": 0.4955, + "step": 8769 + }, + { + "epoch": 0.3618882561690187, + "grad_norm": 7.48549392892544, + "learning_rate": 2.213837571258698e-06, + "loss": 0.5367, + "step": 8770 + }, + { + "epoch": 0.3619295205083767, + "grad_norm": 7.68385420085295, + "learning_rate": 2.213661245459095e-06, + "loss": 0.5518, + "step": 8771 + }, + { + "epoch": 0.3619707848477346, + "grad_norm": 
3.462017336042382, + "learning_rate": 2.2134849069117168e-06, + "loss": 0.6072, + "step": 8772 + }, + { + "epoch": 0.3620120491870925, + "grad_norm": 8.869223250449236, + "learning_rate": 2.213308555619713e-06, + "loss": 0.5489, + "step": 8773 + }, + { + "epoch": 0.3620533135264504, + "grad_norm": 3.2075264133088943, + "learning_rate": 2.213132191586234e-06, + "loss": 0.5403, + "step": 8774 + }, + { + "epoch": 0.36209457786580834, + "grad_norm": 3.251163220007451, + "learning_rate": 2.21295581481443e-06, + "loss": 0.5633, + "step": 8775 + }, + { + "epoch": 0.3621358422051663, + "grad_norm": 10.277297025599475, + "learning_rate": 2.212779425307452e-06, + "loss": 0.489, + "step": 8776 + }, + { + "epoch": 0.36217710654452423, + "grad_norm": 5.602000221326542, + "learning_rate": 2.2126030230684503e-06, + "loss": 0.4893, + "step": 8777 + }, + { + "epoch": 0.36221837088388215, + "grad_norm": 5.094829750006618, + "learning_rate": 2.2124266081005756e-06, + "loss": 0.5045, + "step": 8778 + }, + { + "epoch": 0.36225963522324006, + "grad_norm": 11.374543925758461, + "learning_rate": 2.2122501804069795e-06, + "loss": 0.5498, + "step": 8779 + }, + { + "epoch": 0.362300899562598, + "grad_norm": 3.198250037158806, + "learning_rate": 2.212073739990814e-06, + "loss": 0.5369, + "step": 8780 + }, + { + "epoch": 0.36234216390195595, + "grad_norm": 2.8240782569641296, + "learning_rate": 2.211897286855229e-06, + "loss": 0.5254, + "step": 8781 + }, + { + "epoch": 0.36238342824131387, + "grad_norm": 1.9408718619135006, + "learning_rate": 2.2117208210033784e-06, + "loss": 0.5812, + "step": 8782 + }, + { + "epoch": 0.3624246925806718, + "grad_norm": 2.1465118421096125, + "learning_rate": 2.2115443424384133e-06, + "loss": 0.4737, + "step": 8783 + }, + { + "epoch": 0.3624659569200297, + "grad_norm": 4.662162470086455, + "learning_rate": 2.2113678511634863e-06, + "loss": 0.587, + "step": 8784 + }, + { + "epoch": 0.3625072212593876, + "grad_norm": 2.6423283069059944, + "learning_rate": 2.21119134718175e-06, + "loss": 0.5123, + "step": 8785 + }, + { + "epoch": 0.3625484855987456, + "grad_norm": 2.1572983048534717, + "learning_rate": 2.211014830496357e-06, + "loss": 0.4777, + "step": 8786 + }, + { + "epoch": 0.3625897499381035, + "grad_norm": 3.057008099019208, + "learning_rate": 2.2108383011104603e-06, + "loss": 0.4917, + "step": 8787 + }, + { + "epoch": 0.3626310142774614, + "grad_norm": 3.8037246183449316, + "learning_rate": 2.2106617590272136e-06, + "loss": 0.5291, + "step": 8788 + }, + { + "epoch": 0.36267227861681933, + "grad_norm": 4.756840190332199, + "learning_rate": 2.2104852042497696e-06, + "loss": 0.5834, + "step": 8789 + }, + { + "epoch": 0.36271354295617725, + "grad_norm": 6.0044957958442895, + "learning_rate": 2.210308636781283e-06, + "loss": 0.5168, + "step": 8790 + }, + { + "epoch": 0.3627548072955352, + "grad_norm": 4.72968269895792, + "learning_rate": 2.210132056624907e-06, + "loss": 0.5352, + "step": 8791 + }, + { + "epoch": 0.36279607163489314, + "grad_norm": 3.1281982767877317, + "learning_rate": 2.209955463783796e-06, + "loss": 0.5473, + "step": 8792 + }, + { + "epoch": 0.36283733597425105, + "grad_norm": 2.8792750747360163, + "learning_rate": 2.2097788582611044e-06, + "loss": 0.4967, + "step": 8793 + }, + { + "epoch": 0.36287860031360897, + "grad_norm": 6.27614051373014, + "learning_rate": 2.2096022400599865e-06, + "loss": 0.5419, + "step": 8794 + }, + { + "epoch": 0.3629198646529669, + "grad_norm": 2.6755909444802906, + "learning_rate": 2.209425609183598e-06, + "loss": 0.5359, + "step": 8795 + }, 
+ { + "epoch": 0.36296112899232486, + "grad_norm": 2.552394050358651, + "learning_rate": 2.209248965635093e-06, + "loss": 0.5745, + "step": 8796 + }, + { + "epoch": 0.36300239333168277, + "grad_norm": 2.2892757605430227, + "learning_rate": 2.2090723094176275e-06, + "loss": 0.5151, + "step": 8797 + }, + { + "epoch": 0.3630436576710407, + "grad_norm": 3.7331860947248163, + "learning_rate": 2.2088956405343562e-06, + "loss": 0.5199, + "step": 8798 + }, + { + "epoch": 0.3630849220103986, + "grad_norm": 2.7056396588497136, + "learning_rate": 2.2087189589884363e-06, + "loss": 0.4694, + "step": 8799 + }, + { + "epoch": 0.3631261863497565, + "grad_norm": 3.3031163674066257, + "learning_rate": 2.208542264783022e-06, + "loss": 0.4941, + "step": 8800 + }, + { + "epoch": 0.3631674506891145, + "grad_norm": 3.0717614529175314, + "learning_rate": 2.208365557921271e-06, + "loss": 0.5143, + "step": 8801 + }, + { + "epoch": 0.3632087150284724, + "grad_norm": 6.991977744711675, + "learning_rate": 2.2081888384063385e-06, + "loss": 0.5612, + "step": 8802 + }, + { + "epoch": 0.3632499793678303, + "grad_norm": 4.135903199132379, + "learning_rate": 2.208012106241382e-06, + "loss": 0.5641, + "step": 8803 + }, + { + "epoch": 0.36329124370718824, + "grad_norm": 2.263324695449044, + "learning_rate": 2.207835361429558e-06, + "loss": 0.5628, + "step": 8804 + }, + { + "epoch": 0.36333250804654615, + "grad_norm": 3.6286301939477865, + "learning_rate": 2.207658603974024e-06, + "loss": 0.5489, + "step": 8805 + }, + { + "epoch": 0.3633737723859041, + "grad_norm": 2.8203444824995016, + "learning_rate": 2.2074818338779366e-06, + "loss": 0.5391, + "step": 8806 + }, + { + "epoch": 0.36341503672526204, + "grad_norm": 5.300587775415327, + "learning_rate": 2.207305051144454e-06, + "loss": 0.5507, + "step": 8807 + }, + { + "epoch": 0.36345630106461996, + "grad_norm": 3.914969157427576, + "learning_rate": 2.2071282557767336e-06, + "loss": 0.5362, + "step": 8808 + }, + { + "epoch": 0.3634975654039779, + "grad_norm": 3.6569287732623885, + "learning_rate": 2.206951447777934e-06, + "loss": 0.5455, + "step": 8809 + }, + { + "epoch": 0.3635388297433358, + "grad_norm": 2.275245593919664, + "learning_rate": 2.206774627151213e-06, + "loss": 0.5576, + "step": 8810 + }, + { + "epoch": 0.36358009408269376, + "grad_norm": 2.857897393746408, + "learning_rate": 2.2065977938997287e-06, + "loss": 0.5707, + "step": 8811 + }, + { + "epoch": 0.3636213584220517, + "grad_norm": 2.5915376799778485, + "learning_rate": 2.2064209480266404e-06, + "loss": 0.5148, + "step": 8812 + }, + { + "epoch": 0.3636626227614096, + "grad_norm": 7.635939752271864, + "learning_rate": 2.2062440895351066e-06, + "loss": 0.495, + "step": 8813 + }, + { + "epoch": 0.3637038871007675, + "grad_norm": 2.7417428045745114, + "learning_rate": 2.2060672184282864e-06, + "loss": 0.5353, + "step": 8814 + }, + { + "epoch": 0.3637451514401254, + "grad_norm": 5.427478179720776, + "learning_rate": 2.20589033470934e-06, + "loss": 0.6382, + "step": 8815 + }, + { + "epoch": 0.3637864157794834, + "grad_norm": 2.5773635428839743, + "learning_rate": 2.205713438381426e-06, + "loss": 0.5422, + "step": 8816 + }, + { + "epoch": 0.3638276801188413, + "grad_norm": 3.1589789930736623, + "learning_rate": 2.205536529447705e-06, + "loss": 0.478, + "step": 8817 + }, + { + "epoch": 0.36386894445819923, + "grad_norm": 2.4285995332452424, + "learning_rate": 2.2053596079113357e-06, + "loss": 0.5049, + "step": 8818 + }, + { + "epoch": 0.36391020879755714, + "grad_norm": 2.331380201660422, + "learning_rate": 
2.20518267377548e-06, + "loss": 0.5276, + "step": 8819 + }, + { + "epoch": 0.36395147313691506, + "grad_norm": 5.633718715455394, + "learning_rate": 2.205005727043297e-06, + "loss": 0.527, + "step": 8820 + }, + { + "epoch": 0.36399273747627303, + "grad_norm": 2.6842117080546264, + "learning_rate": 2.204828767717949e-06, + "loss": 0.5884, + "step": 8821 + }, + { + "epoch": 0.36403400181563095, + "grad_norm": 2.5257120544538174, + "learning_rate": 2.2046517958025955e-06, + "loss": 0.4788, + "step": 8822 + }, + { + "epoch": 0.36407526615498886, + "grad_norm": 2.391146485391233, + "learning_rate": 2.204474811300398e-06, + "loss": 0.5515, + "step": 8823 + }, + { + "epoch": 0.3641165304943468, + "grad_norm": 1.7825909710968364, + "learning_rate": 2.204297814214518e-06, + "loss": 0.5686, + "step": 8824 + }, + { + "epoch": 0.3641577948337047, + "grad_norm": 1.8403559670158571, + "learning_rate": 2.204120804548118e-06, + "loss": 0.503, + "step": 8825 + }, + { + "epoch": 0.36419905917306267, + "grad_norm": 9.297089564951085, + "learning_rate": 2.203943782304358e-06, + "loss": 0.5413, + "step": 8826 + }, + { + "epoch": 0.3642403235124206, + "grad_norm": 3.1389100004128325, + "learning_rate": 2.203766747486401e-06, + "loss": 0.5093, + "step": 8827 + }, + { + "epoch": 0.3642815878517785, + "grad_norm": 4.993770403667679, + "learning_rate": 2.2035897000974095e-06, + "loss": 0.5816, + "step": 8828 + }, + { + "epoch": 0.3643228521911364, + "grad_norm": 3.5349393717570576, + "learning_rate": 2.2034126401405465e-06, + "loss": 0.5176, + "step": 8829 + }, + { + "epoch": 0.36436411653049433, + "grad_norm": 2.249739123684328, + "learning_rate": 2.203235567618974e-06, + "loss": 0.5488, + "step": 8830 + }, + { + "epoch": 0.36440538086985225, + "grad_norm": 3.18471871657347, + "learning_rate": 2.2030584825358543e-06, + "loss": 0.5162, + "step": 8831 + }, + { + "epoch": 0.3644466452092102, + "grad_norm": 3.4353194887617406, + "learning_rate": 2.2028813848943515e-06, + "loss": 0.5404, + "step": 8832 + }, + { + "epoch": 0.36448790954856813, + "grad_norm": 2.303025832021453, + "learning_rate": 2.202704274697629e-06, + "loss": 0.5353, + "step": 8833 + }, + { + "epoch": 0.36452917388792605, + "grad_norm": 4.631322353912948, + "learning_rate": 2.2025271519488505e-06, + "loss": 0.5341, + "step": 8834 + }, + { + "epoch": 0.36457043822728397, + "grad_norm": 3.8621718223779715, + "learning_rate": 2.2023500166511793e-06, + "loss": 0.5518, + "step": 8835 + }, + { + "epoch": 0.3646117025666419, + "grad_norm": 2.632928154107085, + "learning_rate": 2.20217286880778e-06, + "loss": 0.4707, + "step": 8836 + }, + { + "epoch": 0.36465296690599985, + "grad_norm": 2.6221375841350643, + "learning_rate": 2.2019957084218164e-06, + "loss": 0.5126, + "step": 8837 + }, + { + "epoch": 0.36469423124535777, + "grad_norm": 10.107555377494645, + "learning_rate": 2.201818535496454e-06, + "loss": 0.5715, + "step": 8838 + }, + { + "epoch": 0.3647354955847157, + "grad_norm": 28.241490502472356, + "learning_rate": 2.201641350034856e-06, + "loss": 0.4895, + "step": 8839 + }, + { + "epoch": 0.3647767599240736, + "grad_norm": 4.555124361372677, + "learning_rate": 2.2014641520401887e-06, + "loss": 0.561, + "step": 8840 + }, + { + "epoch": 0.3648180242634315, + "grad_norm": 3.0258109320587923, + "learning_rate": 2.2012869415156164e-06, + "loss": 0.6164, + "step": 8841 + }, + { + "epoch": 0.3648592886027895, + "grad_norm": 3.0780841996366726, + "learning_rate": 2.2011097184643056e-06, + "loss": 0.5399, + "step": 8842 + }, + { + "epoch": 0.3649005529421474, + 
"grad_norm": 4.8323681642177965, + "learning_rate": 2.200932482889421e-06, + "loss": 0.542, + "step": 8843 + }, + { + "epoch": 0.3649418172815053, + "grad_norm": 3.112190904089532, + "learning_rate": 2.200755234794129e-06, + "loss": 0.5179, + "step": 8844 + }, + { + "epoch": 0.36498308162086324, + "grad_norm": 3.4874033413205825, + "learning_rate": 2.2005779741815955e-06, + "loss": 0.5755, + "step": 8845 + }, + { + "epoch": 0.36502434596022115, + "grad_norm": 8.408703299279155, + "learning_rate": 2.2004007010549865e-06, + "loss": 0.5736, + "step": 8846 + }, + { + "epoch": 0.3650656102995791, + "grad_norm": 2.2246862873078284, + "learning_rate": 2.2002234154174687e-06, + "loss": 0.5038, + "step": 8847 + }, + { + "epoch": 0.36510687463893704, + "grad_norm": 2.851560388876344, + "learning_rate": 2.2000461172722097e-06, + "loss": 0.5712, + "step": 8848 + }, + { + "epoch": 0.36514813897829496, + "grad_norm": 2.7125761677801563, + "learning_rate": 2.1998688066223753e-06, + "loss": 0.5533, + "step": 8849 + }, + { + "epoch": 0.36518940331765287, + "grad_norm": 2.7358784448887623, + "learning_rate": 2.1996914834711333e-06, + "loss": 0.4982, + "step": 8850 + }, + { + "epoch": 0.3652306676570108, + "grad_norm": 4.5759095489191015, + "learning_rate": 2.1995141478216512e-06, + "loss": 0.5142, + "step": 8851 + }, + { + "epoch": 0.36527193199636876, + "grad_norm": 4.363026269370623, + "learning_rate": 2.1993367996770966e-06, + "loss": 0.4894, + "step": 8852 + }, + { + "epoch": 0.3653131963357267, + "grad_norm": 14.326909625056322, + "learning_rate": 2.199159439040637e-06, + "loss": 0.5537, + "step": 8853 + }, + { + "epoch": 0.3653544606750846, + "grad_norm": 4.433063812435462, + "learning_rate": 2.1989820659154407e-06, + "loss": 0.534, + "step": 8854 + }, + { + "epoch": 0.3653957250144425, + "grad_norm": 2.483774207176319, + "learning_rate": 2.198804680304676e-06, + "loss": 0.503, + "step": 8855 + }, + { + "epoch": 0.3654369893538004, + "grad_norm": 3.393191559585096, + "learning_rate": 2.1986272822115123e-06, + "loss": 0.555, + "step": 8856 + }, + { + "epoch": 0.3654782536931584, + "grad_norm": 2.6427000715538984, + "learning_rate": 2.198449871639117e-06, + "loss": 0.4991, + "step": 8857 + }, + { + "epoch": 0.3655195180325163, + "grad_norm": 3.118307920799844, + "learning_rate": 2.19827244859066e-06, + "loss": 0.4923, + "step": 8858 + }, + { + "epoch": 0.3655607823718742, + "grad_norm": 4.443868512803954, + "learning_rate": 2.19809501306931e-06, + "loss": 0.5329, + "step": 8859 + }, + { + "epoch": 0.36560204671123214, + "grad_norm": 2.761058846333729, + "learning_rate": 2.197917565078237e-06, + "loss": 0.5256, + "step": 8860 + }, + { + "epoch": 0.36564331105059006, + "grad_norm": 2.3856713555966786, + "learning_rate": 2.19774010462061e-06, + "loss": 0.5015, + "step": 8861 + }, + { + "epoch": 0.36568457538994803, + "grad_norm": 2.7214609551460005, + "learning_rate": 2.1975626316995997e-06, + "loss": 0.5136, + "step": 8862 + }, + { + "epoch": 0.36572583972930595, + "grad_norm": 2.1784268240264386, + "learning_rate": 2.1973851463183756e-06, + "loss": 0.5059, + "step": 8863 + }, + { + "epoch": 0.36576710406866386, + "grad_norm": 2.757009303579924, + "learning_rate": 2.197207648480108e-06, + "loss": 0.5444, + "step": 8864 + }, + { + "epoch": 0.3658083684080218, + "grad_norm": 7.683462475514395, + "learning_rate": 2.1970301381879678e-06, + "loss": 0.5321, + "step": 8865 + }, + { + "epoch": 0.3658496327473797, + "grad_norm": 2.113137292253185, + "learning_rate": 2.196852615445126e-06, + "loss": 0.5297, + 
"step": 8866 + }, + { + "epoch": 0.36589089708673767, + "grad_norm": 2.5361863620290372, + "learning_rate": 2.1966750802547522e-06, + "loss": 0.5155, + "step": 8867 + }, + { + "epoch": 0.3659321614260956, + "grad_norm": 4.466506649864051, + "learning_rate": 2.1964975326200196e-06, + "loss": 0.5262, + "step": 8868 + }, + { + "epoch": 0.3659734257654535, + "grad_norm": 2.840664369522125, + "learning_rate": 2.1963199725440985e-06, + "loss": 0.5551, + "step": 8869 + }, + { + "epoch": 0.3660146901048114, + "grad_norm": 3.4115236290476476, + "learning_rate": 2.1961424000301604e-06, + "loss": 0.5276, + "step": 8870 + }, + { + "epoch": 0.36605595444416933, + "grad_norm": 3.162672952538279, + "learning_rate": 2.195964815081378e-06, + "loss": 0.4895, + "step": 8871 + }, + { + "epoch": 0.3660972187835273, + "grad_norm": 3.2619311525132257, + "learning_rate": 2.1957872177009226e-06, + "loss": 0.5068, + "step": 8872 + }, + { + "epoch": 0.3661384831228852, + "grad_norm": 2.7280221609351125, + "learning_rate": 2.195609607891967e-06, + "loss": 0.5431, + "step": 8873 + }, + { + "epoch": 0.36617974746224313, + "grad_norm": 2.58156897934572, + "learning_rate": 2.195431985657684e-06, + "loss": 0.498, + "step": 8874 + }, + { + "epoch": 0.36622101180160105, + "grad_norm": 1.9678145013095472, + "learning_rate": 2.1952543510012456e-06, + "loss": 0.5834, + "step": 8875 + }, + { + "epoch": 0.36626227614095896, + "grad_norm": 5.670319186600421, + "learning_rate": 2.1950767039258255e-06, + "loss": 0.5472, + "step": 8876 + }, + { + "epoch": 0.36630354048031694, + "grad_norm": 2.4209565501398758, + "learning_rate": 2.194899044434597e-06, + "loss": 0.5362, + "step": 8877 + }, + { + "epoch": 0.36634480481967485, + "grad_norm": 3.4487614586067568, + "learning_rate": 2.194721372530732e-06, + "loss": 0.5349, + "step": 8878 + }, + { + "epoch": 0.36638606915903277, + "grad_norm": 2.552073268580141, + "learning_rate": 2.1945436882174067e-06, + "loss": 0.5565, + "step": 8879 + }, + { + "epoch": 0.3664273334983907, + "grad_norm": 3.3242436769868253, + "learning_rate": 2.1943659914977925e-06, + "loss": 0.5338, + "step": 8880 + }, + { + "epoch": 0.3664685978377486, + "grad_norm": 5.922512560394683, + "learning_rate": 2.194188282375065e-06, + "loss": 0.5925, + "step": 8881 + }, + { + "epoch": 0.36650986217710657, + "grad_norm": 15.712279586934857, + "learning_rate": 2.1940105608523983e-06, + "loss": 0.5718, + "step": 8882 + }, + { + "epoch": 0.3665511265164645, + "grad_norm": 3.828102008373162, + "learning_rate": 2.1938328269329673e-06, + "loss": 0.5648, + "step": 8883 + }, + { + "epoch": 0.3665923908558224, + "grad_norm": 14.575625870545052, + "learning_rate": 2.193655080619946e-06, + "loss": 0.5147, + "step": 8884 + }, + { + "epoch": 0.3666336551951803, + "grad_norm": 3.1583882994810493, + "learning_rate": 2.19347732191651e-06, + "loss": 0.5702, + "step": 8885 + }, + { + "epoch": 0.36667491953453824, + "grad_norm": 2.41260938292844, + "learning_rate": 2.1932995508258335e-06, + "loss": 0.5643, + "step": 8886 + }, + { + "epoch": 0.3667161838738962, + "grad_norm": 2.410671547253292, + "learning_rate": 2.193121767351093e-06, + "loss": 0.5237, + "step": 8887 + }, + { + "epoch": 0.3667574482132541, + "grad_norm": 10.858026153180134, + "learning_rate": 2.1929439714954636e-06, + "loss": 0.5273, + "step": 8888 + }, + { + "epoch": 0.36679871255261204, + "grad_norm": 3.771979891939538, + "learning_rate": 2.192766163262122e-06, + "loss": 0.558, + "step": 8889 + }, + { + "epoch": 0.36683997689196995, + "grad_norm": 2.072713639396017, + 
"learning_rate": 2.1925883426542434e-06, + "loss": 0.5214, + "step": 8890 + }, + { + "epoch": 0.36688124123132787, + "grad_norm": 3.8204386375023245, + "learning_rate": 2.1924105096750042e-06, + "loss": 0.4939, + "step": 8891 + }, + { + "epoch": 0.3669225055706858, + "grad_norm": 3.4995586988302603, + "learning_rate": 2.1922326643275814e-06, + "loss": 0.5578, + "step": 8892 + }, + { + "epoch": 0.36696376991004376, + "grad_norm": 3.826491055193043, + "learning_rate": 2.1920548066151512e-06, + "loss": 0.5208, + "step": 8893 + }, + { + "epoch": 0.3670050342494017, + "grad_norm": 2.7377936297046244, + "learning_rate": 2.191876936540891e-06, + "loss": 0.4957, + "step": 8894 + }, + { + "epoch": 0.3670462985887596, + "grad_norm": 3.0891114491967526, + "learning_rate": 2.191699054107978e-06, + "loss": 0.5525, + "step": 8895 + }, + { + "epoch": 0.3670875629281175, + "grad_norm": 2.095633957670848, + "learning_rate": 2.1915211593195897e-06, + "loss": 0.5096, + "step": 8896 + }, + { + "epoch": 0.3671288272674754, + "grad_norm": 6.23913883580554, + "learning_rate": 2.1913432521789034e-06, + "loss": 0.5314, + "step": 8897 + }, + { + "epoch": 0.3671700916068334, + "grad_norm": 3.939536779089486, + "learning_rate": 2.1911653326890968e-06, + "loss": 0.581, + "step": 8898 + }, + { + "epoch": 0.3672113559461913, + "grad_norm": 3.024267521473114, + "learning_rate": 2.1909874008533487e-06, + "loss": 0.5936, + "step": 8899 + }, + { + "epoch": 0.3672526202855492, + "grad_norm": 3.8926517174573556, + "learning_rate": 2.190809456674837e-06, + "loss": 0.6056, + "step": 8900 + }, + { + "epoch": 0.36729388462490714, + "grad_norm": 3.426863653305407, + "learning_rate": 2.1906315001567402e-06, + "loss": 0.5581, + "step": 8901 + }, + { + "epoch": 0.36733514896426506, + "grad_norm": 2.751599247904991, + "learning_rate": 2.190453531302237e-06, + "loss": 0.535, + "step": 8902 + }, + { + "epoch": 0.36737641330362303, + "grad_norm": 2.9664344098904962, + "learning_rate": 2.190275550114507e-06, + "loss": 0.5982, + "step": 8903 + }, + { + "epoch": 0.36741767764298094, + "grad_norm": 2.7622123238929572, + "learning_rate": 2.1900975565967284e-06, + "loss": 0.503, + "step": 8904 + }, + { + "epoch": 0.36745894198233886, + "grad_norm": 3.5066526434605407, + "learning_rate": 2.1899195507520814e-06, + "loss": 0.5779, + "step": 8905 + }, + { + "epoch": 0.3675002063216968, + "grad_norm": 2.8946546959097703, + "learning_rate": 2.189741532583745e-06, + "loss": 0.5613, + "step": 8906 + }, + { + "epoch": 0.3675414706610547, + "grad_norm": 5.973705858918917, + "learning_rate": 2.189563502094899e-06, + "loss": 0.5748, + "step": 8907 + }, + { + "epoch": 0.36758273500041266, + "grad_norm": 18.786997331850944, + "learning_rate": 2.1893854592887243e-06, + "loss": 0.5114, + "step": 8908 + }, + { + "epoch": 0.3676239993397706, + "grad_norm": 5.781159897679778, + "learning_rate": 2.1892074041684006e-06, + "loss": 0.5892, + "step": 8909 + }, + { + "epoch": 0.3676652636791285, + "grad_norm": 4.387457631438738, + "learning_rate": 2.189029336737109e-06, + "loss": 0.5571, + "step": 8910 + }, + { + "epoch": 0.3677065280184864, + "grad_norm": 2.643142798951641, + "learning_rate": 2.188851256998029e-06, + "loss": 0.6, + "step": 8911 + }, + { + "epoch": 0.3677477923578443, + "grad_norm": 3.952947170674396, + "learning_rate": 2.188673164954343e-06, + "loss": 0.5075, + "step": 8912 + }, + { + "epoch": 0.3677890566972023, + "grad_norm": 8.729968135855243, + "learning_rate": 2.188495060609231e-06, + "loss": 0.5268, + "step": 8913 + }, + { + "epoch": 
0.3678303210365602, + "grad_norm": 4.591780259873988, + "learning_rate": 2.188316943965875e-06, + "loss": 0.5018, + "step": 8914 + }, + { + "epoch": 0.36787158537591813, + "grad_norm": 3.7752829928562743, + "learning_rate": 2.1881388150274567e-06, + "loss": 0.5087, + "step": 8915 + }, + { + "epoch": 0.36791284971527605, + "grad_norm": 3.826870641623151, + "learning_rate": 2.1879606737971575e-06, + "loss": 0.5828, + "step": 8916 + }, + { + "epoch": 0.36795411405463396, + "grad_norm": 2.8745758161163417, + "learning_rate": 2.1877825202781594e-06, + "loss": 0.4901, + "step": 8917 + }, + { + "epoch": 0.36799537839399193, + "grad_norm": 5.061478040773883, + "learning_rate": 2.1876043544736455e-06, + "loss": 0.5545, + "step": 8918 + }, + { + "epoch": 0.36803664273334985, + "grad_norm": 31.725083820781816, + "learning_rate": 2.1874261763867973e-06, + "loss": 0.5223, + "step": 8919 + }, + { + "epoch": 0.36807790707270777, + "grad_norm": 3.0281545597679567, + "learning_rate": 2.187247986020798e-06, + "loss": 0.5932, + "step": 8920 + }, + { + "epoch": 0.3681191714120657, + "grad_norm": 8.948809786237682, + "learning_rate": 2.1870697833788304e-06, + "loss": 0.5327, + "step": 8921 + }, + { + "epoch": 0.3681604357514236, + "grad_norm": 13.79246394915587, + "learning_rate": 2.186891568464078e-06, + "loss": 0.4992, + "step": 8922 + }, + { + "epoch": 0.36820170009078157, + "grad_norm": 6.806566094171043, + "learning_rate": 2.1867133412797235e-06, + "loss": 0.5764, + "step": 8923 + }, + { + "epoch": 0.3682429644301395, + "grad_norm": 3.554009953003318, + "learning_rate": 2.186535101828951e-06, + "loss": 0.5537, + "step": 8924 + }, + { + "epoch": 0.3682842287694974, + "grad_norm": 3.143498830920643, + "learning_rate": 2.1863568501149435e-06, + "loss": 0.549, + "step": 8925 + }, + { + "epoch": 0.3683254931088553, + "grad_norm": 5.910510321591433, + "learning_rate": 2.1861785861408865e-06, + "loss": 0.555, + "step": 8926 + }, + { + "epoch": 0.36836675744821323, + "grad_norm": 2.495111079933864, + "learning_rate": 2.186000309909963e-06, + "loss": 0.5645, + "step": 8927 + }, + { + "epoch": 0.3684080217875712, + "grad_norm": 2.4127424860089763, + "learning_rate": 2.185822021425358e-06, + "loss": 0.5364, + "step": 8928 + }, + { + "epoch": 0.3684492861269291, + "grad_norm": 2.648777742481896, + "learning_rate": 2.185643720690256e-06, + "loss": 0.4664, + "step": 8929 + }, + { + "epoch": 0.36849055046628704, + "grad_norm": 2.2009532328889394, + "learning_rate": 2.185465407707842e-06, + "loss": 0.5508, + "step": 8930 + }, + { + "epoch": 0.36853181480564495, + "grad_norm": 4.147763652085926, + "learning_rate": 2.1852870824813006e-06, + "loss": 0.4927, + "step": 8931 + }, + { + "epoch": 0.36857307914500287, + "grad_norm": 9.016609669793587, + "learning_rate": 2.1851087450138184e-06, + "loss": 0.5355, + "step": 8932 + }, + { + "epoch": 0.36861434348436084, + "grad_norm": 4.360287656064425, + "learning_rate": 2.1849303953085795e-06, + "loss": 0.49, + "step": 8933 + }, + { + "epoch": 0.36865560782371876, + "grad_norm": 2.488280805484958, + "learning_rate": 2.1847520333687704e-06, + "loss": 0.4663, + "step": 8934 + }, + { + "epoch": 0.36869687216307667, + "grad_norm": 2.7643490081716906, + "learning_rate": 2.1845736591975773e-06, + "loss": 0.581, + "step": 8935 + }, + { + "epoch": 0.3687381365024346, + "grad_norm": 1.841343058943545, + "learning_rate": 2.184395272798186e-06, + "loss": 0.5827, + "step": 8936 + }, + { + "epoch": 0.3687794008417925, + "grad_norm": 2.8161574657700754, + "learning_rate": 2.1842168741737826e-06, 
+ "loss": 0.5076, + "step": 8937 + }, + { + "epoch": 0.3688206651811505, + "grad_norm": 3.0204690329830504, + "learning_rate": 2.1840384633275545e-06, + "loss": 0.5714, + "step": 8938 + }, + { + "epoch": 0.3688619295205084, + "grad_norm": 4.811269894226887, + "learning_rate": 2.183860040262688e-06, + "loss": 0.5256, + "step": 8939 + }, + { + "epoch": 0.3689031938598663, + "grad_norm": 3.6071827952196265, + "learning_rate": 2.1836816049823707e-06, + "loss": 0.5524, + "step": 8940 + }, + { + "epoch": 0.3689444581992242, + "grad_norm": 2.210548613627206, + "learning_rate": 2.18350315748979e-06, + "loss": 0.5043, + "step": 8941 + }, + { + "epoch": 0.36898572253858214, + "grad_norm": 5.576036738533143, + "learning_rate": 2.183324697788133e-06, + "loss": 0.5022, + "step": 8942 + }, + { + "epoch": 0.3690269868779401, + "grad_norm": 4.149223858691567, + "learning_rate": 2.1831462258805867e-06, + "loss": 0.5082, + "step": 8943 + }, + { + "epoch": 0.369068251217298, + "grad_norm": 6.508770027245799, + "learning_rate": 2.1829677417703403e-06, + "loss": 0.5209, + "step": 8944 + }, + { + "epoch": 0.36910951555665594, + "grad_norm": 2.274967583268502, + "learning_rate": 2.1827892454605813e-06, + "loss": 0.4786, + "step": 8945 + }, + { + "epoch": 0.36915077989601386, + "grad_norm": 2.4135938159487065, + "learning_rate": 2.182610736954498e-06, + "loss": 0.5443, + "step": 8946 + }, + { + "epoch": 0.3691920442353718, + "grad_norm": 2.982481740052624, + "learning_rate": 2.18243221625528e-06, + "loss": 0.5702, + "step": 8947 + }, + { + "epoch": 0.36923330857472975, + "grad_norm": 2.9392574823360302, + "learning_rate": 2.1822536833661152e-06, + "loss": 0.5039, + "step": 8948 + }, + { + "epoch": 0.36927457291408766, + "grad_norm": 2.993177595783313, + "learning_rate": 2.1820751382901935e-06, + "loss": 0.5594, + "step": 8949 + }, + { + "epoch": 0.3693158372534456, + "grad_norm": 2.3857637214433143, + "learning_rate": 2.1818965810307024e-06, + "loss": 0.5185, + "step": 8950 + }, + { + "epoch": 0.3693571015928035, + "grad_norm": 2.7201522094334307, + "learning_rate": 2.181718011590833e-06, + "loss": 0.5227, + "step": 8951 + }, + { + "epoch": 0.3693983659321614, + "grad_norm": 3.7979014449688195, + "learning_rate": 2.181539429973775e-06, + "loss": 0.5816, + "step": 8952 + }, + { + "epoch": 0.3694396302715193, + "grad_norm": 2.7489783223952915, + "learning_rate": 2.181360836182717e-06, + "loss": 0.5236, + "step": 8953 + }, + { + "epoch": 0.3694808946108773, + "grad_norm": 7.367663757737086, + "learning_rate": 2.1811822302208504e-06, + "loss": 0.5965, + "step": 8954 + }, + { + "epoch": 0.3695221589502352, + "grad_norm": 3.2182829612584247, + "learning_rate": 2.181003612091365e-06, + "loss": 0.5504, + "step": 8955 + }, + { + "epoch": 0.36956342328959313, + "grad_norm": 2.4105100498633956, + "learning_rate": 2.1808249817974517e-06, + "loss": 0.544, + "step": 8956 + }, + { + "epoch": 0.36960468762895105, + "grad_norm": 2.5811318312955613, + "learning_rate": 2.180646339342301e-06, + "loss": 0.5223, + "step": 8957 + }, + { + "epoch": 0.36964595196830896, + "grad_norm": 3.8005453731154084, + "learning_rate": 2.1804676847291033e-06, + "loss": 0.5571, + "step": 8958 + }, + { + "epoch": 0.36968721630766693, + "grad_norm": 5.712201047436132, + "learning_rate": 2.1802890179610516e-06, + "loss": 0.5417, + "step": 8959 + }, + { + "epoch": 0.36972848064702485, + "grad_norm": 5.027830530642762, + "learning_rate": 2.1801103390413352e-06, + "loss": 0.5434, + "step": 8960 + }, + { + "epoch": 0.36976974498638276, + "grad_norm": 
3.2946538341631006, + "learning_rate": 2.179931647973147e-06, + "loss": 0.5478, + "step": 8961 + }, + { + "epoch": 0.3698110093257407, + "grad_norm": 2.22296506326765, + "learning_rate": 2.179752944759679e-06, + "loss": 0.5266, + "step": 8962 + }, + { + "epoch": 0.3698522736650986, + "grad_norm": 7.265973999954361, + "learning_rate": 2.179574229404123e-06, + "loss": 0.6012, + "step": 8963 + }, + { + "epoch": 0.36989353800445657, + "grad_norm": 4.482596530632053, + "learning_rate": 2.179395501909671e-06, + "loss": 0.5415, + "step": 8964 + }, + { + "epoch": 0.3699348023438145, + "grad_norm": 3.737069939606123, + "learning_rate": 2.179216762279516e-06, + "loss": 0.5071, + "step": 8965 + }, + { + "epoch": 0.3699760666831724, + "grad_norm": 2.938149330571995, + "learning_rate": 2.17903801051685e-06, + "loss": 0.515, + "step": 8966 + }, + { + "epoch": 0.3700173310225303, + "grad_norm": 4.256306920639094, + "learning_rate": 2.1788592466248666e-06, + "loss": 0.5922, + "step": 8967 + }, + { + "epoch": 0.37005859536188823, + "grad_norm": 6.732874930552001, + "learning_rate": 2.178680470606759e-06, + "loss": 0.5547, + "step": 8968 + }, + { + "epoch": 0.3700998597012462, + "grad_norm": 2.050438801306646, + "learning_rate": 2.1785016824657207e-06, + "loss": 0.4993, + "step": 8969 + }, + { + "epoch": 0.3701411240406041, + "grad_norm": 2.3531542233824836, + "learning_rate": 2.178322882204944e-06, + "loss": 0.5724, + "step": 8970 + }, + { + "epoch": 0.37018238837996204, + "grad_norm": 5.044043346767845, + "learning_rate": 2.1781440698276245e-06, + "loss": 0.4689, + "step": 8971 + }, + { + "epoch": 0.37022365271931995, + "grad_norm": 3.7677143766141223, + "learning_rate": 2.177965245336955e-06, + "loss": 0.5961, + "step": 8972 + }, + { + "epoch": 0.37026491705867787, + "grad_norm": 2.8195844581571063, + "learning_rate": 2.1777864087361305e-06, + "loss": 0.5267, + "step": 8973 + }, + { + "epoch": 0.37030618139803584, + "grad_norm": 2.209958455413738, + "learning_rate": 2.1776075600283456e-06, + "loss": 0.5217, + "step": 8974 + }, + { + "epoch": 0.37034744573739375, + "grad_norm": 2.7483218898658914, + "learning_rate": 2.177428699216794e-06, + "loss": 0.5335, + "step": 8975 + }, + { + "epoch": 0.37038871007675167, + "grad_norm": 2.808483795363212, + "learning_rate": 2.177249826304671e-06, + "loss": 0.5486, + "step": 8976 + }, + { + "epoch": 0.3704299744161096, + "grad_norm": 7.251802231966368, + "learning_rate": 2.177070941295172e-06, + "loss": 0.5276, + "step": 8977 + }, + { + "epoch": 0.3704712387554675, + "grad_norm": 3.02320182613939, + "learning_rate": 2.1768920441914928e-06, + "loss": 0.591, + "step": 8978 + }, + { + "epoch": 0.3705125030948255, + "grad_norm": 2.3035527815088135, + "learning_rate": 2.176713134996828e-06, + "loss": 0.5027, + "step": 8979 + }, + { + "epoch": 0.3705537674341834, + "grad_norm": 2.278535224790387, + "learning_rate": 2.1765342137143734e-06, + "loss": 0.5478, + "step": 8980 + }, + { + "epoch": 0.3705950317735413, + "grad_norm": 5.305088928882234, + "learning_rate": 2.176355280347326e-06, + "loss": 0.4615, + "step": 8981 + }, + { + "epoch": 0.3706362961128992, + "grad_norm": 9.951810333643007, + "learning_rate": 2.1761763348988806e-06, + "loss": 0.5013, + "step": 8982 + }, + { + "epoch": 0.37067756045225714, + "grad_norm": 2.6025515719540193, + "learning_rate": 2.175997377372235e-06, + "loss": 0.4871, + "step": 8983 + }, + { + "epoch": 0.3707188247916151, + "grad_norm": 1.9994699795566144, + "learning_rate": 2.1758184077705848e-06, + "loss": 0.5444, + "step": 8984 + }, + { + 
"epoch": 0.370760089130973, + "grad_norm": 4.691637781942255, + "learning_rate": 2.1756394260971268e-06, + "loss": 0.5084, + "step": 8985 + }, + { + "epoch": 0.37080135347033094, + "grad_norm": 4.83859156256357, + "learning_rate": 2.1754604323550586e-06, + "loss": 0.5754, + "step": 8986 + }, + { + "epoch": 0.37084261780968886, + "grad_norm": 9.378948905525398, + "learning_rate": 2.175281426547578e-06, + "loss": 0.524, + "step": 8987 + }, + { + "epoch": 0.3708838821490468, + "grad_norm": 2.807920797300318, + "learning_rate": 2.1751024086778813e-06, + "loss": 0.5336, + "step": 8988 + }, + { + "epoch": 0.37092514648840474, + "grad_norm": 3.696182132300901, + "learning_rate": 2.1749233787491668e-06, + "loss": 0.6069, + "step": 8989 + }, + { + "epoch": 0.37096641082776266, + "grad_norm": 3.007549211595685, + "learning_rate": 2.1747443367646327e-06, + "loss": 0.5165, + "step": 8990 + }, + { + "epoch": 0.3710076751671206, + "grad_norm": 3.7537299132912603, + "learning_rate": 2.1745652827274763e-06, + "loss": 0.48, + "step": 8991 + }, + { + "epoch": 0.3710489395064785, + "grad_norm": 6.291886293737962, + "learning_rate": 2.1743862166408967e-06, + "loss": 0.5863, + "step": 8992 + }, + { + "epoch": 0.3710902038458364, + "grad_norm": 2.7104502561526393, + "learning_rate": 2.174207138508092e-06, + "loss": 0.5744, + "step": 8993 + }, + { + "epoch": 0.3711314681851944, + "grad_norm": 2.547409671754595, + "learning_rate": 2.1740280483322613e-06, + "loss": 0.5326, + "step": 8994 + }, + { + "epoch": 0.3711727325245523, + "grad_norm": 3.849096921661957, + "learning_rate": 2.1738489461166036e-06, + "loss": 0.5395, + "step": 8995 + }, + { + "epoch": 0.3712139968639102, + "grad_norm": 3.854459022248729, + "learning_rate": 2.1736698318643183e-06, + "loss": 0.5593, + "step": 8996 + }, + { + "epoch": 0.3712552612032681, + "grad_norm": 2.4422702294130327, + "learning_rate": 2.173490705578604e-06, + "loss": 0.5147, + "step": 8997 + }, + { + "epoch": 0.37129652554262604, + "grad_norm": 2.7358015666768445, + "learning_rate": 2.173311567262661e-06, + "loss": 0.4755, + "step": 8998 + }, + { + "epoch": 0.371337789881984, + "grad_norm": 4.339766067082981, + "learning_rate": 2.1731324169196887e-06, + "loss": 0.5422, + "step": 8999 + }, + { + "epoch": 0.37137905422134193, + "grad_norm": 2.9995769191756785, + "learning_rate": 2.172953254552888e-06, + "loss": 0.4976, + "step": 9000 + }, + { + "epoch": 0.37142031856069985, + "grad_norm": 3.9762235157281225, + "learning_rate": 2.1727740801654588e-06, + "loss": 0.5153, + "step": 9001 + }, + { + "epoch": 0.37146158290005776, + "grad_norm": 7.406342446270186, + "learning_rate": 2.1725948937606015e-06, + "loss": 0.5603, + "step": 9002 + }, + { + "epoch": 0.3715028472394157, + "grad_norm": 2.846121316205191, + "learning_rate": 2.1724156953415165e-06, + "loss": 0.5507, + "step": 9003 + }, + { + "epoch": 0.37154411157877365, + "grad_norm": 3.1863639586973678, + "learning_rate": 2.1722364849114053e-06, + "loss": 0.5562, + "step": 9004 + }, + { + "epoch": 0.37158537591813157, + "grad_norm": 2.957769366403542, + "learning_rate": 2.1720572624734687e-06, + "loss": 0.5364, + "step": 9005 + }, + { + "epoch": 0.3716266402574895, + "grad_norm": 3.5778913861505406, + "learning_rate": 2.171878028030908e-06, + "loss": 0.5398, + "step": 9006 + }, + { + "epoch": 0.3716679045968474, + "grad_norm": 2.2664240143229573, + "learning_rate": 2.1716987815869252e-06, + "loss": 0.4945, + "step": 9007 + }, + { + "epoch": 0.3717091689362053, + "grad_norm": 2.704875421230887, + "learning_rate": 
2.1715195231447224e-06, + "loss": 0.5512, + "step": 9008 + }, + { + "epoch": 0.3717504332755633, + "grad_norm": 3.079880774142452, + "learning_rate": 2.1713402527075e-06, + "loss": 0.561, + "step": 9009 + }, + { + "epoch": 0.3717916976149212, + "grad_norm": 2.7594970718418885, + "learning_rate": 2.171160970278462e-06, + "loss": 0.5514, + "step": 9010 + }, + { + "epoch": 0.3718329619542791, + "grad_norm": 4.2041856180309605, + "learning_rate": 2.1709816758608093e-06, + "loss": 0.5381, + "step": 9011 + }, + { + "epoch": 0.37187422629363703, + "grad_norm": 3.2490653549821733, + "learning_rate": 2.170802369457746e-06, + "loss": 0.5483, + "step": 9012 + }, + { + "epoch": 0.37191549063299495, + "grad_norm": 4.22865051494604, + "learning_rate": 2.1706230510724737e-06, + "loss": 0.5052, + "step": 9013 + }, + { + "epoch": 0.37195675497235287, + "grad_norm": 3.15178185794052, + "learning_rate": 2.170443720708197e-06, + "loss": 0.5931, + "step": 9014 + }, + { + "epoch": 0.37199801931171084, + "grad_norm": 3.9745916251479283, + "learning_rate": 2.1702643783681175e-06, + "loss": 0.5015, + "step": 9015 + }, + { + "epoch": 0.37203928365106875, + "grad_norm": 2.4189467713536867, + "learning_rate": 2.1700850240554392e-06, + "loss": 0.5385, + "step": 9016 + }, + { + "epoch": 0.37208054799042667, + "grad_norm": 3.5460375065042493, + "learning_rate": 2.1699056577733663e-06, + "loss": 0.5579, + "step": 9017 + }, + { + "epoch": 0.3721218123297846, + "grad_norm": 3.7460854280200815, + "learning_rate": 2.169726279525103e-06, + "loss": 0.5951, + "step": 9018 + }, + { + "epoch": 0.3721630766691425, + "grad_norm": 2.6895518668275744, + "learning_rate": 2.1695468893138523e-06, + "loss": 0.5525, + "step": 9019 + }, + { + "epoch": 0.37220434100850047, + "grad_norm": 2.487086925003468, + "learning_rate": 2.1693674871428198e-06, + "loss": 0.5571, + "step": 9020 + }, + { + "epoch": 0.3722456053478584, + "grad_norm": 4.848085789031279, + "learning_rate": 2.169188073015209e-06, + "loss": 0.5897, + "step": 9021 + }, + { + "epoch": 0.3722868696872163, + "grad_norm": 2.719677407788274, + "learning_rate": 2.169008646934226e-06, + "loss": 0.4895, + "step": 9022 + }, + { + "epoch": 0.3723281340265742, + "grad_norm": 8.857977317910375, + "learning_rate": 2.168829208903074e-06, + "loss": 0.5615, + "step": 9023 + }, + { + "epoch": 0.37236939836593214, + "grad_norm": 4.655012597415846, + "learning_rate": 2.1686497589249593e-06, + "loss": 0.531, + "step": 9024 + }, + { + "epoch": 0.3724106627052901, + "grad_norm": 3.1278503655610326, + "learning_rate": 2.1684702970030877e-06, + "loss": 0.5332, + "step": 9025 + }, + { + "epoch": 0.372451927044648, + "grad_norm": 2.686550572350732, + "learning_rate": 2.168290823140664e-06, + "loss": 0.5674, + "step": 9026 + }, + { + "epoch": 0.37249319138400594, + "grad_norm": 3.425393462031518, + "learning_rate": 2.1681113373408943e-06, + "loss": 0.539, + "step": 9027 + }, + { + "epoch": 0.37253445572336386, + "grad_norm": 26.586970944817462, + "learning_rate": 2.167931839606985e-06, + "loss": 0.5453, + "step": 9028 + }, + { + "epoch": 0.37257572006272177, + "grad_norm": 4.270247974454648, + "learning_rate": 2.167752329942142e-06, + "loss": 0.5585, + "step": 9029 + }, + { + "epoch": 0.37261698440207974, + "grad_norm": 8.774323919877684, + "learning_rate": 2.167572808349572e-06, + "loss": 0.5774, + "step": 9030 + }, + { + "epoch": 0.37265824874143766, + "grad_norm": 2.685876265125228, + "learning_rate": 2.1673932748324815e-06, + "loss": 0.5871, + "step": 9031 + }, + { + "epoch": 0.3726995130807956, + 
"grad_norm": 2.4369272577744843, + "learning_rate": 2.1672137293940778e-06, + "loss": 0.539, + "step": 9032 + }, + { + "epoch": 0.3727407774201535, + "grad_norm": 2.815906757983832, + "learning_rate": 2.1670341720375676e-06, + "loss": 0.4668, + "step": 9033 + }, + { + "epoch": 0.3727820417595114, + "grad_norm": 3.040705146249019, + "learning_rate": 2.1668546027661585e-06, + "loss": 0.5693, + "step": 9034 + }, + { + "epoch": 0.3728233060988694, + "grad_norm": 4.6379349910213294, + "learning_rate": 2.166675021583058e-06, + "loss": 0.5851, + "step": 9035 + }, + { + "epoch": 0.3728645704382273, + "grad_norm": 3.644267780147323, + "learning_rate": 2.1664954284914734e-06, + "loss": 0.5607, + "step": 9036 + }, + { + "epoch": 0.3729058347775852, + "grad_norm": 3.813783679050918, + "learning_rate": 2.1663158234946137e-06, + "loss": 0.5922, + "step": 9037 + }, + { + "epoch": 0.3729470991169431, + "grad_norm": 2.291575457715519, + "learning_rate": 2.1661362065956862e-06, + "loss": 0.5507, + "step": 9038 + }, + { + "epoch": 0.37298836345630104, + "grad_norm": 5.33885769928192, + "learning_rate": 2.1659565777979e-06, + "loss": 0.5768, + "step": 9039 + }, + { + "epoch": 0.373029627795659, + "grad_norm": 5.278711159569256, + "learning_rate": 2.1657769371044635e-06, + "loss": 0.5552, + "step": 9040 + }, + { + "epoch": 0.37307089213501693, + "grad_norm": 2.724100841126173, + "learning_rate": 2.165597284518585e-06, + "loss": 0.5577, + "step": 9041 + }, + { + "epoch": 0.37311215647437485, + "grad_norm": 3.5601310432868276, + "learning_rate": 2.1654176200434736e-06, + "loss": 0.5579, + "step": 9042 + }, + { + "epoch": 0.37315342081373276, + "grad_norm": 6.020550201925874, + "learning_rate": 2.1652379436823394e-06, + "loss": 0.5164, + "step": 9043 + }, + { + "epoch": 0.3731946851530907, + "grad_norm": 3.6317643037142746, + "learning_rate": 2.165058255438391e-06, + "loss": 0.57, + "step": 9044 + }, + { + "epoch": 0.37323594949244865, + "grad_norm": 2.7519627154828483, + "learning_rate": 2.1648785553148384e-06, + "loss": 0.5633, + "step": 9045 + }, + { + "epoch": 0.37327721383180656, + "grad_norm": 2.756555699965397, + "learning_rate": 2.1646988433148916e-06, + "loss": 0.4785, + "step": 9046 + }, + { + "epoch": 0.3733184781711645, + "grad_norm": 3.029500627043959, + "learning_rate": 2.1645191194417613e-06, + "loss": 0.4889, + "step": 9047 + }, + { + "epoch": 0.3733597425105224, + "grad_norm": 3.6548054566547696, + "learning_rate": 2.1643393836986565e-06, + "loss": 0.5456, + "step": 9048 + }, + { + "epoch": 0.3734010068498803, + "grad_norm": 3.3940843517419768, + "learning_rate": 2.1641596360887886e-06, + "loss": 0.5034, + "step": 9049 + }, + { + "epoch": 0.3734422711892383, + "grad_norm": 9.649416175727257, + "learning_rate": 2.163979876615368e-06, + "loss": 0.5457, + "step": 9050 + }, + { + "epoch": 0.3734835355285962, + "grad_norm": 5.894591096003536, + "learning_rate": 2.163800105281606e-06, + "loss": 0.5334, + "step": 9051 + }, + { + "epoch": 0.3735247998679541, + "grad_norm": 4.979395238830904, + "learning_rate": 2.163620322090713e-06, + "loss": 0.5036, + "step": 9052 + }, + { + "epoch": 0.37356606420731203, + "grad_norm": 8.168096988842882, + "learning_rate": 2.1634405270459016e-06, + "loss": 0.5208, + "step": 9053 + }, + { + "epoch": 0.37360732854666995, + "grad_norm": 7.670529745130026, + "learning_rate": 2.163260720150382e-06, + "loss": 0.5767, + "step": 9054 + }, + { + "epoch": 0.3736485928860279, + "grad_norm": 13.130842996005967, + "learning_rate": 2.1630809014073675e-06, + "loss": 0.5303, + "step": 
9055 + }, + { + "epoch": 0.37368985722538584, + "grad_norm": 2.9780306924357687, + "learning_rate": 2.162901070820069e-06, + "loss": 0.5511, + "step": 9056 + }, + { + "epoch": 0.37373112156474375, + "grad_norm": 4.5224708674307745, + "learning_rate": 2.1627212283916994e-06, + "loss": 0.5586, + "step": 9057 + }, + { + "epoch": 0.37377238590410167, + "grad_norm": 2.591336135284799, + "learning_rate": 2.16254137412547e-06, + "loss": 0.5924, + "step": 9058 + }, + { + "epoch": 0.3738136502434596, + "grad_norm": 2.795855913148659, + "learning_rate": 2.162361508024595e-06, + "loss": 0.5777, + "step": 9059 + }, + { + "epoch": 0.37385491458281755, + "grad_norm": 3.538400951953586, + "learning_rate": 2.162181630092286e-06, + "loss": 0.519, + "step": 9060 + }, + { + "epoch": 0.37389617892217547, + "grad_norm": 3.1088935670313824, + "learning_rate": 2.1620017403317563e-06, + "loss": 0.5341, + "step": 9061 + }, + { + "epoch": 0.3739374432615334, + "grad_norm": 3.157494513299447, + "learning_rate": 2.16182183874622e-06, + "loss": 0.5008, + "step": 9062 + }, + { + "epoch": 0.3739787076008913, + "grad_norm": 3.7044587589184483, + "learning_rate": 2.16164192533889e-06, + "loss": 0.5167, + "step": 9063 + }, + { + "epoch": 0.3740199719402492, + "grad_norm": 2.1953125916767466, + "learning_rate": 2.1614620001129796e-06, + "loss": 0.5275, + "step": 9064 + }, + { + "epoch": 0.3740612362796072, + "grad_norm": 2.930998295335175, + "learning_rate": 2.1612820630717037e-06, + "loss": 0.5431, + "step": 9065 + }, + { + "epoch": 0.3741025006189651, + "grad_norm": 4.496480896514683, + "learning_rate": 2.1611021142182755e-06, + "loss": 0.5499, + "step": 9066 + }, + { + "epoch": 0.374143764958323, + "grad_norm": 2.6390359520475153, + "learning_rate": 2.16092215355591e-06, + "loss": 0.5457, + "step": 9067 + }, + { + "epoch": 0.37418502929768094, + "grad_norm": 2.9170495387094553, + "learning_rate": 2.160742181087821e-06, + "loss": 0.4913, + "step": 9068 + }, + { + "epoch": 0.37422629363703885, + "grad_norm": 1.9870745984618166, + "learning_rate": 2.1605621968172244e-06, + "loss": 0.5044, + "step": 9069 + }, + { + "epoch": 0.3742675579763968, + "grad_norm": 2.342123584469405, + "learning_rate": 2.1603822007473337e-06, + "loss": 0.5059, + "step": 9070 + }, + { + "epoch": 0.37430882231575474, + "grad_norm": 3.6031485428812573, + "learning_rate": 2.1602021928813657e-06, + "loss": 0.5993, + "step": 9071 + }, + { + "epoch": 0.37435008665511266, + "grad_norm": 4.3246041044566175, + "learning_rate": 2.1600221732225345e-06, + "loss": 0.4974, + "step": 9072 + }, + { + "epoch": 0.3743913509944706, + "grad_norm": 8.60942252399164, + "learning_rate": 2.159842141774056e-06, + "loss": 0.6215, + "step": 9073 + }, + { + "epoch": 0.3744326153338285, + "grad_norm": 8.231910504694719, + "learning_rate": 2.1596620985391464e-06, + "loss": 0.5744, + "step": 9074 + }, + { + "epoch": 0.3744738796731864, + "grad_norm": 3.1259326153557985, + "learning_rate": 2.159482043521022e-06, + "loss": 0.5507, + "step": 9075 + }, + { + "epoch": 0.3745151440125444, + "grad_norm": 20.934063816922542, + "learning_rate": 2.159301976722898e-06, + "loss": 0.4788, + "step": 9076 + }, + { + "epoch": 0.3745564083519023, + "grad_norm": 3.3341888676024225, + "learning_rate": 2.159121898147991e-06, + "loss": 0.546, + "step": 9077 + }, + { + "epoch": 0.3745976726912602, + "grad_norm": 5.0022419938831275, + "learning_rate": 2.1589418077995186e-06, + "loss": 0.4962, + "step": 9078 + }, + { + "epoch": 0.3746389370306181, + "grad_norm": 3.8656322218742853, + "learning_rate": 
2.1587617056806967e-06, + "loss": 0.4678, + "step": 9079 + }, + { + "epoch": 0.37468020136997604, + "grad_norm": 2.396637606637978, + "learning_rate": 2.1585815917947433e-06, + "loss": 0.5075, + "step": 9080 + }, + { + "epoch": 0.374721465709334, + "grad_norm": 2.8533571630767938, + "learning_rate": 2.158401466144875e-06, + "loss": 0.4807, + "step": 9081 + }, + { + "epoch": 0.3747627300486919, + "grad_norm": 10.95285080596395, + "learning_rate": 2.1582213287343088e-06, + "loss": 0.4989, + "step": 9082 + }, + { + "epoch": 0.37480399438804984, + "grad_norm": 2.826289406025845, + "learning_rate": 2.1580411795662635e-06, + "loss": 0.5428, + "step": 9083 + }, + { + "epoch": 0.37484525872740776, + "grad_norm": 3.4406075152920046, + "learning_rate": 2.1578610186439568e-06, + "loss": 0.4878, + "step": 9084 + }, + { + "epoch": 0.3748865230667657, + "grad_norm": 3.332935852385221, + "learning_rate": 2.157680845970606e-06, + "loss": 0.5026, + "step": 9085 + }, + { + "epoch": 0.37492778740612365, + "grad_norm": 4.6738263283539005, + "learning_rate": 2.1575006615494307e-06, + "loss": 0.5578, + "step": 9086 + }, + { + "epoch": 0.37496905174548156, + "grad_norm": 3.5951662437265126, + "learning_rate": 2.157320465383649e-06, + "loss": 0.5014, + "step": 9087 + }, + { + "epoch": 0.3750103160848395, + "grad_norm": 2.6847718686837747, + "learning_rate": 2.1571402574764788e-06, + "loss": 0.5624, + "step": 9088 + }, + { + "epoch": 0.3750515804241974, + "grad_norm": 4.805083403475933, + "learning_rate": 2.1569600378311397e-06, + "loss": 0.5123, + "step": 9089 + }, + { + "epoch": 0.3750928447635553, + "grad_norm": 2.4879442146563093, + "learning_rate": 2.1567798064508507e-06, + "loss": 0.4905, + "step": 9090 + }, + { + "epoch": 0.3751341091029133, + "grad_norm": 20.846444796269633, + "learning_rate": 2.1565995633388314e-06, + "loss": 0.5229, + "step": 9091 + }, + { + "epoch": 0.3751753734422712, + "grad_norm": 2.101020031084141, + "learning_rate": 2.156419308498302e-06, + "loss": 0.5062, + "step": 9092 + }, + { + "epoch": 0.3752166377816291, + "grad_norm": 2.456448847702318, + "learning_rate": 2.156239041932481e-06, + "loss": 0.5311, + "step": 9093 + }, + { + "epoch": 0.37525790212098703, + "grad_norm": 3.3207950109515703, + "learning_rate": 2.1560587636445895e-06, + "loss": 0.5343, + "step": 9094 + }, + { + "epoch": 0.37529916646034495, + "grad_norm": 6.048035573657194, + "learning_rate": 2.155878473637847e-06, + "loss": 0.5754, + "step": 9095 + }, + { + "epoch": 0.3753404307997029, + "grad_norm": 3.642453595049669, + "learning_rate": 2.155698171915474e-06, + "loss": 0.5228, + "step": 9096 + }, + { + "epoch": 0.37538169513906083, + "grad_norm": 6.877953223545137, + "learning_rate": 2.1555178584806913e-06, + "loss": 0.539, + "step": 9097 + }, + { + "epoch": 0.37542295947841875, + "grad_norm": 8.089902299705301, + "learning_rate": 2.1553375333367206e-06, + "loss": 0.5799, + "step": 9098 + }, + { + "epoch": 0.37546422381777667, + "grad_norm": 2.901676059257316, + "learning_rate": 2.1551571964867812e-06, + "loss": 0.5799, + "step": 9099 + }, + { + "epoch": 0.3755054881571346, + "grad_norm": 9.229036384581923, + "learning_rate": 2.154976847934096e-06, + "loss": 0.5746, + "step": 9100 + }, + { + "epoch": 0.37554675249649255, + "grad_norm": 5.3763217413005515, + "learning_rate": 2.1547964876818854e-06, + "loss": 0.5414, + "step": 9101 + }, + { + "epoch": 0.37558801683585047, + "grad_norm": 10.80426389230155, + "learning_rate": 2.1546161157333715e-06, + "loss": 0.4853, + "step": 9102 + }, + { + "epoch": 
0.3756292811752084, + "grad_norm": 12.40049093141847, + "learning_rate": 2.1544357320917765e-06, + "loss": 0.5289, + "step": 9103 + }, + { + "epoch": 0.3756705455145663, + "grad_norm": 2.4859038850688377, + "learning_rate": 2.154255336760322e-06, + "loss": 0.5341, + "step": 9104 + }, + { + "epoch": 0.3757118098539242, + "grad_norm": 2.016989138673731, + "learning_rate": 2.1540749297422303e-06, + "loss": 0.4758, + "step": 9105 + }, + { + "epoch": 0.3757530741932822, + "grad_norm": 4.59085249979038, + "learning_rate": 2.1538945110407244e-06, + "loss": 0.5354, + "step": 9106 + }, + { + "epoch": 0.3757943385326401, + "grad_norm": 2.8150592220363504, + "learning_rate": 2.1537140806590264e-06, + "loss": 0.5259, + "step": 9107 + }, + { + "epoch": 0.375835602871998, + "grad_norm": 3.8770275784798294, + "learning_rate": 2.15353363860036e-06, + "loss": 0.6074, + "step": 9108 + }, + { + "epoch": 0.37587686721135594, + "grad_norm": 2.6912813275189653, + "learning_rate": 2.1533531848679475e-06, + "loss": 0.5406, + "step": 9109 + }, + { + "epoch": 0.37591813155071385, + "grad_norm": 2.5799252748314108, + "learning_rate": 2.153172719465013e-06, + "loss": 0.5219, + "step": 9110 + }, + { + "epoch": 0.3759593958900718, + "grad_norm": 2.96155407877448, + "learning_rate": 2.1529922423947796e-06, + "loss": 0.5441, + "step": 9111 + }, + { + "epoch": 0.37600066022942974, + "grad_norm": 6.804542618017786, + "learning_rate": 2.152811753660471e-06, + "loss": 0.5267, + "step": 9112 + }, + { + "epoch": 0.37604192456878766, + "grad_norm": 3.4326049240296386, + "learning_rate": 2.152631253265312e-06, + "loss": 0.5071, + "step": 9113 + }, + { + "epoch": 0.37608318890814557, + "grad_norm": 9.73415299306516, + "learning_rate": 2.1524507412125253e-06, + "loss": 0.5336, + "step": 9114 + }, + { + "epoch": 0.3761244532475035, + "grad_norm": 2.0604119290709133, + "learning_rate": 2.152270217505337e-06, + "loss": 0.5201, + "step": 9115 + }, + { + "epoch": 0.37616571758686146, + "grad_norm": 2.397360043081465, + "learning_rate": 2.15208968214697e-06, + "loss": 0.552, + "step": 9116 + }, + { + "epoch": 0.3762069819262194, + "grad_norm": 2.444247436215374, + "learning_rate": 2.151909135140651e-06, + "loss": 0.5043, + "step": 9117 + }, + { + "epoch": 0.3762482462655773, + "grad_norm": 3.294387483224259, + "learning_rate": 2.1517285764896037e-06, + "loss": 0.512, + "step": 9118 + }, + { + "epoch": 0.3762895106049352, + "grad_norm": 3.9014063788345936, + "learning_rate": 2.1515480061970537e-06, + "loss": 0.59, + "step": 9119 + }, + { + "epoch": 0.3763307749442931, + "grad_norm": 4.700192366000636, + "learning_rate": 2.151367424266226e-06, + "loss": 0.548, + "step": 9120 + }, + { + "epoch": 0.3763720392836511, + "grad_norm": 3.5160069876970454, + "learning_rate": 2.1511868307003473e-06, + "loss": 0.56, + "step": 9121 + }, + { + "epoch": 0.376413303623009, + "grad_norm": 2.868608404093014, + "learning_rate": 2.151006225502642e-06, + "loss": 0.5321, + "step": 9122 + }, + { + "epoch": 0.3764545679623669, + "grad_norm": 4.707717620748138, + "learning_rate": 2.150825608676337e-06, + "loss": 0.5342, + "step": 9123 + }, + { + "epoch": 0.37649583230172484, + "grad_norm": 2.67128386904675, + "learning_rate": 2.150644980224659e-06, + "loss": 0.4982, + "step": 9124 + }, + { + "epoch": 0.37653709664108276, + "grad_norm": 3.72602376521629, + "learning_rate": 2.150464340150834e-06, + "loss": 0.4781, + "step": 9125 + }, + { + "epoch": 0.37657836098044073, + "grad_norm": 3.229279777892923, + "learning_rate": 2.1502836884580886e-06, + "loss": 0.5012, 
+ "step": 9126 + }, + { + "epoch": 0.37661962531979865, + "grad_norm": 2.8006316608429342, + "learning_rate": 2.1501030251496494e-06, + "loss": 0.5169, + "step": 9127 + }, + { + "epoch": 0.37666088965915656, + "grad_norm": 5.637215723732374, + "learning_rate": 2.149922350228744e-06, + "loss": 0.5013, + "step": 9128 + }, + { + "epoch": 0.3767021539985145, + "grad_norm": 4.642432501134771, + "learning_rate": 2.1497416636985997e-06, + "loss": 0.4947, + "step": 9129 + }, + { + "epoch": 0.3767434183378724, + "grad_norm": 3.270195445924113, + "learning_rate": 2.1495609655624433e-06, + "loss": 0.4859, + "step": 9130 + }, + { + "epoch": 0.37678468267723036, + "grad_norm": 48.62095058276192, + "learning_rate": 2.1493802558235042e-06, + "loss": 0.5502, + "step": 9131 + }, + { + "epoch": 0.3768259470165883, + "grad_norm": 5.249486234513707, + "learning_rate": 2.1491995344850084e-06, + "loss": 0.5365, + "step": 9132 + }, + { + "epoch": 0.3768672113559462, + "grad_norm": 2.088413825972844, + "learning_rate": 2.1490188015501856e-06, + "loss": 0.5628, + "step": 9133 + }, + { + "epoch": 0.3769084756953041, + "grad_norm": 2.1726921748690193, + "learning_rate": 2.148838057022263e-06, + "loss": 0.5315, + "step": 9134 + }, + { + "epoch": 0.37694974003466203, + "grad_norm": 2.5682343995844006, + "learning_rate": 2.1486573009044694e-06, + "loss": 0.596, + "step": 9135 + }, + { + "epoch": 0.37699100437401994, + "grad_norm": 5.957666124042135, + "learning_rate": 2.1484765332000333e-06, + "loss": 0.5087, + "step": 9136 + }, + { + "epoch": 0.3770322687133779, + "grad_norm": 2.4272525086633463, + "learning_rate": 2.148295753912185e-06, + "loss": 0.5355, + "step": 9137 + }, + { + "epoch": 0.37707353305273583, + "grad_norm": 3.332158944800508, + "learning_rate": 2.148114963044152e-06, + "loss": 0.4867, + "step": 9138 + }, + { + "epoch": 0.37711479739209375, + "grad_norm": 4.543487251668345, + "learning_rate": 2.1479341605991653e-06, + "loss": 0.5398, + "step": 9139 + }, + { + "epoch": 0.37715606173145166, + "grad_norm": 8.871255596300717, + "learning_rate": 2.1477533465804526e-06, + "loss": 0.5358, + "step": 9140 + }, + { + "epoch": 0.3771973260708096, + "grad_norm": 4.405531555568895, + "learning_rate": 2.1475725209912456e-06, + "loss": 0.5356, + "step": 9141 + }, + { + "epoch": 0.37723859041016755, + "grad_norm": 2.4501742936004414, + "learning_rate": 2.147391683834773e-06, + "loss": 0.4778, + "step": 9142 + }, + { + "epoch": 0.37727985474952547, + "grad_norm": 5.237850066237279, + "learning_rate": 2.147210835114265e-06, + "loss": 0.5595, + "step": 9143 + }, + { + "epoch": 0.3773211190888834, + "grad_norm": 6.1403855226917425, + "learning_rate": 2.1470299748329528e-06, + "loss": 0.5062, + "step": 9144 + }, + { + "epoch": 0.3773623834282413, + "grad_norm": 6.208082493278176, + "learning_rate": 2.146849102994067e-06, + "loss": 0.5957, + "step": 9145 + }, + { + "epoch": 0.3774036477675992, + "grad_norm": 3.3798089341797346, + "learning_rate": 2.1466682196008375e-06, + "loss": 0.6093, + "step": 9146 + }, + { + "epoch": 0.3774449121069572, + "grad_norm": 3.7784233988739757, + "learning_rate": 2.146487324656496e-06, + "loss": 0.5019, + "step": 9147 + }, + { + "epoch": 0.3774861764463151, + "grad_norm": 21.17859910250989, + "learning_rate": 2.1463064181642734e-06, + "loss": 0.5205, + "step": 9148 + }, + { + "epoch": 0.377527440785673, + "grad_norm": 3.460027485414604, + "learning_rate": 2.1461255001274016e-06, + "loss": 0.5383, + "step": 9149 + }, + { + "epoch": 0.37756870512503093, + "grad_norm": 5.23669306565453, + 
"learning_rate": 2.145944570549112e-06, + "loss": 0.5409, + "step": 9150 + }, + { + "epoch": 0.37760996946438885, + "grad_norm": 2.9480398242096615, + "learning_rate": 2.145763629432637e-06, + "loss": 0.5406, + "step": 9151 + }, + { + "epoch": 0.3776512338037468, + "grad_norm": 6.962847593994627, + "learning_rate": 2.1455826767812076e-06, + "loss": 0.4779, + "step": 9152 + }, + { + "epoch": 0.37769249814310474, + "grad_norm": 5.052555721430263, + "learning_rate": 2.145401712598056e-06, + "loss": 0.527, + "step": 9153 + }, + { + "epoch": 0.37773376248246265, + "grad_norm": 2.412016411246581, + "learning_rate": 2.1452207368864162e-06, + "loss": 0.5277, + "step": 9154 + }, + { + "epoch": 0.37777502682182057, + "grad_norm": 6.954175052884674, + "learning_rate": 2.1450397496495196e-06, + "loss": 0.5067, + "step": 9155 + }, + { + "epoch": 0.3778162911611785, + "grad_norm": 2.7365433668719277, + "learning_rate": 2.1448587508905993e-06, + "loss": 0.5232, + "step": 9156 + }, + { + "epoch": 0.37785755550053646, + "grad_norm": 8.128507518949188, + "learning_rate": 2.1446777406128886e-06, + "loss": 0.5429, + "step": 9157 + }, + { + "epoch": 0.3778988198398944, + "grad_norm": 4.384403189106096, + "learning_rate": 2.144496718819621e-06, + "loss": 0.5171, + "step": 9158 + }, + { + "epoch": 0.3779400841792523, + "grad_norm": 4.283123539871204, + "learning_rate": 2.1443156855140296e-06, + "loss": 0.5834, + "step": 9159 + }, + { + "epoch": 0.3779813485186102, + "grad_norm": 3.3724292052972684, + "learning_rate": 2.144134640699348e-06, + "loss": 0.5569, + "step": 9160 + }, + { + "epoch": 0.3780226128579681, + "grad_norm": 7.713673662984035, + "learning_rate": 2.1439535843788104e-06, + "loss": 0.5497, + "step": 9161 + }, + { + "epoch": 0.3780638771973261, + "grad_norm": 3.8470526812294965, + "learning_rate": 2.143772516555651e-06, + "loss": 0.5044, + "step": 9162 + }, + { + "epoch": 0.378105141536684, + "grad_norm": 5.13185522508065, + "learning_rate": 2.1435914372331037e-06, + "loss": 0.5328, + "step": 9163 + }, + { + "epoch": 0.3781464058760419, + "grad_norm": 1.7773629307688132, + "learning_rate": 2.143410346414404e-06, + "loss": 0.4951, + "step": 9164 + }, + { + "epoch": 0.37818767021539984, + "grad_norm": 2.8139253397686437, + "learning_rate": 2.143229244102785e-06, + "loss": 0.529, + "step": 9165 + }, + { + "epoch": 0.37822893455475776, + "grad_norm": 4.028888128258913, + "learning_rate": 2.1430481303014835e-06, + "loss": 0.5275, + "step": 9166 + }, + { + "epoch": 0.37827019889411573, + "grad_norm": 4.415975777021817, + "learning_rate": 2.142867005013733e-06, + "loss": 0.5058, + "step": 9167 + }, + { + "epoch": 0.37831146323347364, + "grad_norm": 5.269277807662818, + "learning_rate": 2.14268586824277e-06, + "loss": 0.5194, + "step": 9168 + }, + { + "epoch": 0.37835272757283156, + "grad_norm": 2.6204858380404037, + "learning_rate": 2.142504719991829e-06, + "loss": 0.5477, + "step": 9169 + }, + { + "epoch": 0.3783939919121895, + "grad_norm": 3.656000430246408, + "learning_rate": 2.1423235602641474e-06, + "loss": 0.5172, + "step": 9170 + }, + { + "epoch": 0.3784352562515474, + "grad_norm": 2.100722557460165, + "learning_rate": 2.1421423890629597e-06, + "loss": 0.526, + "step": 9171 + }, + { + "epoch": 0.37847652059090536, + "grad_norm": 7.200290692900821, + "learning_rate": 2.1419612063915025e-06, + "loss": 0.5347, + "step": 9172 + }, + { + "epoch": 0.3785177849302633, + "grad_norm": 4.501486994472814, + "learning_rate": 2.1417800122530124e-06, + "loss": 0.5403, + "step": 9173 + }, + { + "epoch": 
0.3785590492696212, + "grad_norm": 3.352008967240683, + "learning_rate": 2.1415988066507255e-06, + "loss": 0.5111, + "step": 9174 + }, + { + "epoch": 0.3786003136089791, + "grad_norm": 5.02276944676683, + "learning_rate": 2.141417589587879e-06, + "loss": 0.5312, + "step": 9175 + }, + { + "epoch": 0.378641577948337, + "grad_norm": 12.799275840183025, + "learning_rate": 2.1412363610677097e-06, + "loss": 0.5556, + "step": 9176 + }, + { + "epoch": 0.378682842287695, + "grad_norm": 2.0719036186754005, + "learning_rate": 2.1410551210934547e-06, + "loss": 0.4546, + "step": 9177 + }, + { + "epoch": 0.3787241066270529, + "grad_norm": 2.5167710170704725, + "learning_rate": 2.1408738696683523e-06, + "loss": 0.5427, + "step": 9178 + }, + { + "epoch": 0.37876537096641083, + "grad_norm": 6.118769839802708, + "learning_rate": 2.1406926067956386e-06, + "loss": 0.5346, + "step": 9179 + }, + { + "epoch": 0.37880663530576875, + "grad_norm": 2.9993626325068865, + "learning_rate": 2.1405113324785526e-06, + "loss": 0.5484, + "step": 9180 + }, + { + "epoch": 0.37884789964512666, + "grad_norm": 4.092684536893083, + "learning_rate": 2.1403300467203317e-06, + "loss": 0.5278, + "step": 9181 + }, + { + "epoch": 0.37888916398448463, + "grad_norm": 2.9372146892218614, + "learning_rate": 2.140148749524214e-06, + "loss": 0.5137, + "step": 9182 + }, + { + "epoch": 0.37893042832384255, + "grad_norm": 29.193997802609275, + "learning_rate": 2.139967440893439e-06, + "loss": 0.529, + "step": 9183 + }, + { + "epoch": 0.37897169266320047, + "grad_norm": 4.027423783549301, + "learning_rate": 2.139786120831244e-06, + "loss": 0.5324, + "step": 9184 + }, + { + "epoch": 0.3790129570025584, + "grad_norm": 8.55554885006222, + "learning_rate": 2.1396047893408686e-06, + "loss": 0.4984, + "step": 9185 + }, + { + "epoch": 0.3790542213419163, + "grad_norm": 4.43428141305553, + "learning_rate": 2.1394234464255514e-06, + "loss": 0.5582, + "step": 9186 + }, + { + "epoch": 0.37909548568127427, + "grad_norm": 5.017896225858453, + "learning_rate": 2.139242092088532e-06, + "loss": 0.4932, + "step": 9187 + }, + { + "epoch": 0.3791367500206322, + "grad_norm": 4.417513534878335, + "learning_rate": 2.1390607263330496e-06, + "loss": 0.5952, + "step": 9188 + }, + { + "epoch": 0.3791780143599901, + "grad_norm": 1.6244935053164336, + "learning_rate": 2.138879349162344e-06, + "loss": 0.5547, + "step": 9189 + }, + { + "epoch": 0.379219278699348, + "grad_norm": 2.8100883734533015, + "learning_rate": 2.138697960579655e-06, + "loss": 0.5451, + "step": 9190 + }, + { + "epoch": 0.37926054303870593, + "grad_norm": 7.389200018909367, + "learning_rate": 2.1385165605882223e-06, + "loss": 0.5046, + "step": 9191 + }, + { + "epoch": 0.3793018073780639, + "grad_norm": 14.919692043900906, + "learning_rate": 2.1383351491912874e-06, + "loss": 0.5392, + "step": 9192 + }, + { + "epoch": 0.3793430717174218, + "grad_norm": 10.34243533318825, + "learning_rate": 2.1381537263920888e-06, + "loss": 0.4603, + "step": 9193 + }, + { + "epoch": 0.37938433605677974, + "grad_norm": 9.570979960577706, + "learning_rate": 2.137972292193869e-06, + "loss": 0.5113, + "step": 9194 + }, + { + "epoch": 0.37942560039613765, + "grad_norm": 3.245813628870902, + "learning_rate": 2.137790846599868e-06, + "loss": 0.4772, + "step": 9195 + }, + { + "epoch": 0.37946686473549557, + "grad_norm": 3.2568224188938815, + "learning_rate": 2.1376093896133265e-06, + "loss": 0.5571, + "step": 9196 + }, + { + "epoch": 0.3795081290748535, + "grad_norm": 37.59348650093847, + "learning_rate": 2.1374279212374865e-06, + 
"loss": 0.5249, + "step": 9197 + }, + { + "epoch": 0.37954939341421146, + "grad_norm": 3.0048991541215933, + "learning_rate": 2.1372464414755894e-06, + "loss": 0.5718, + "step": 9198 + }, + { + "epoch": 0.37959065775356937, + "grad_norm": 7.52986118186117, + "learning_rate": 2.137064950330877e-06, + "loss": 0.5698, + "step": 9199 + }, + { + "epoch": 0.3796319220929273, + "grad_norm": 5.454911261121366, + "learning_rate": 2.13688344780659e-06, + "loss": 0.5553, + "step": 9200 + }, + { + "epoch": 0.3796731864322852, + "grad_norm": 3.3531742695418925, + "learning_rate": 2.136701933905972e-06, + "loss": 0.4555, + "step": 9201 + }, + { + "epoch": 0.3797144507716431, + "grad_norm": 3.6441469451142994, + "learning_rate": 2.1365204086322645e-06, + "loss": 0.5369, + "step": 9202 + }, + { + "epoch": 0.3797557151110011, + "grad_norm": 6.320613345980772, + "learning_rate": 2.13633887198871e-06, + "loss": 0.5027, + "step": 9203 + }, + { + "epoch": 0.379796979450359, + "grad_norm": 3.464387566680476, + "learning_rate": 2.1361573239785516e-06, + "loss": 0.5903, + "step": 9204 + }, + { + "epoch": 0.3798382437897169, + "grad_norm": 5.243397297257435, + "learning_rate": 2.1359757646050324e-06, + "loss": 0.4933, + "step": 9205 + }, + { + "epoch": 0.37987950812907484, + "grad_norm": 3.2452271248568954, + "learning_rate": 2.1357941938713944e-06, + "loss": 0.5151, + "step": 9206 + }, + { + "epoch": 0.37992077246843275, + "grad_norm": 3.4180917063594487, + "learning_rate": 2.135612611780882e-06, + "loss": 0.5118, + "step": 9207 + }, + { + "epoch": 0.3799620368077907, + "grad_norm": 3.8850698060841675, + "learning_rate": 2.135431018336738e-06, + "loss": 0.5406, + "step": 9208 + }, + { + "epoch": 0.38000330114714864, + "grad_norm": 2.637917864992105, + "learning_rate": 2.135249413542207e-06, + "loss": 0.5819, + "step": 9209 + }, + { + "epoch": 0.38004456548650656, + "grad_norm": 2.2063972043245537, + "learning_rate": 2.135067797400531e-06, + "loss": 0.5056, + "step": 9210 + }, + { + "epoch": 0.3800858298258645, + "grad_norm": 4.702066681329117, + "learning_rate": 2.1348861699149567e-06, + "loss": 0.5642, + "step": 9211 + }, + { + "epoch": 0.3801270941652224, + "grad_norm": 2.507872538620254, + "learning_rate": 2.134704531088727e-06, + "loss": 0.5071, + "step": 9212 + }, + { + "epoch": 0.38016835850458036, + "grad_norm": 2.724447843795767, + "learning_rate": 2.1345228809250862e-06, + "loss": 0.5342, + "step": 9213 + }, + { + "epoch": 0.3802096228439383, + "grad_norm": 2.934381807294081, + "learning_rate": 2.134341219427279e-06, + "loss": 0.5526, + "step": 9214 + }, + { + "epoch": 0.3802508871832962, + "grad_norm": 3.310453673622289, + "learning_rate": 2.134159546598552e-06, + "loss": 0.561, + "step": 9215 + }, + { + "epoch": 0.3802921515226541, + "grad_norm": 12.030317601142741, + "learning_rate": 2.133977862442148e-06, + "loss": 0.5231, + "step": 9216 + }, + { + "epoch": 0.380333415862012, + "grad_norm": 3.164203517153814, + "learning_rate": 2.133796166961314e-06, + "loss": 0.566, + "step": 9217 + }, + { + "epoch": 0.38037468020137, + "grad_norm": 2.154707865809556, + "learning_rate": 2.133614460159295e-06, + "loss": 0.4862, + "step": 9218 + }, + { + "epoch": 0.3804159445407279, + "grad_norm": 3.752204739476194, + "learning_rate": 2.133432742039336e-06, + "loss": 0.5128, + "step": 9219 + }, + { + "epoch": 0.38045720888008583, + "grad_norm": 3.1069128421522083, + "learning_rate": 2.133251012604684e-06, + "loss": 0.5183, + "step": 9220 + }, + { + "epoch": 0.38049847321944374, + "grad_norm": 3.914884361013288, + 
"learning_rate": 2.1330692718585848e-06, + "loss": 0.5064, + "step": 9221 + }, + { + "epoch": 0.38053973755880166, + "grad_norm": 2.4372122125443836, + "learning_rate": 2.1328875198042846e-06, + "loss": 0.5363, + "step": 9222 + }, + { + "epoch": 0.38058100189815963, + "grad_norm": 2.518312587877174, + "learning_rate": 2.1327057564450303e-06, + "loss": 0.4881, + "step": 9223 + }, + { + "epoch": 0.38062226623751755, + "grad_norm": 2.6946675085059946, + "learning_rate": 2.1325239817840683e-06, + "loss": 0.5875, + "step": 9224 + }, + { + "epoch": 0.38066353057687546, + "grad_norm": 2.87439310090579, + "learning_rate": 2.1323421958246456e-06, + "loss": 0.5538, + "step": 9225 + }, + { + "epoch": 0.3807047949162334, + "grad_norm": 5.107783752612172, + "learning_rate": 2.1321603985700094e-06, + "loss": 0.5349, + "step": 9226 + }, + { + "epoch": 0.3807460592555913, + "grad_norm": 5.238884705344351, + "learning_rate": 2.131978590023407e-06, + "loss": 0.5533, + "step": 9227 + }, + { + "epoch": 0.38078732359494927, + "grad_norm": 18.999652022436567, + "learning_rate": 2.1317967701880858e-06, + "loss": 0.5357, + "step": 9228 + }, + { + "epoch": 0.3808285879343072, + "grad_norm": 2.3818030523096, + "learning_rate": 2.1316149390672944e-06, + "loss": 0.5497, + "step": 9229 + }, + { + "epoch": 0.3808698522736651, + "grad_norm": 4.512833689407881, + "learning_rate": 2.1314330966642795e-06, + "loss": 0.5633, + "step": 9230 + }, + { + "epoch": 0.380911116613023, + "grad_norm": 4.309296433360006, + "learning_rate": 2.1312512429822903e-06, + "loss": 0.5501, + "step": 9231 + }, + { + "epoch": 0.38095238095238093, + "grad_norm": 3.574642962429458, + "learning_rate": 2.131069378024575e-06, + "loss": 0.536, + "step": 9232 + }, + { + "epoch": 0.3809936452917389, + "grad_norm": 3.733778178127669, + "learning_rate": 2.130887501794381e-06, + "loss": 0.5599, + "step": 9233 + }, + { + "epoch": 0.3810349096310968, + "grad_norm": 3.086884416071604, + "learning_rate": 2.130705614294959e-06, + "loss": 0.5366, + "step": 9234 + }, + { + "epoch": 0.38107617397045473, + "grad_norm": 8.72738851876913, + "learning_rate": 2.1305237155295564e-06, + "loss": 0.5522, + "step": 9235 + }, + { + "epoch": 0.38111743830981265, + "grad_norm": 7.074565181149387, + "learning_rate": 2.1303418055014224e-06, + "loss": 0.5513, + "step": 9236 + }, + { + "epoch": 0.38115870264917057, + "grad_norm": 5.215988551508416, + "learning_rate": 2.1301598842138078e-06, + "loss": 0.5053, + "step": 9237 + }, + { + "epoch": 0.38119996698852854, + "grad_norm": 3.5087743190642526, + "learning_rate": 2.129977951669961e-06, + "loss": 0.4667, + "step": 9238 + }, + { + "epoch": 0.38124123132788645, + "grad_norm": 4.85683845462351, + "learning_rate": 2.1297960078731317e-06, + "loss": 0.5405, + "step": 9239 + }, + { + "epoch": 0.38128249566724437, + "grad_norm": 3.887658224194143, + "learning_rate": 2.1296140528265704e-06, + "loss": 0.5024, + "step": 9240 + }, + { + "epoch": 0.3813237600066023, + "grad_norm": 3.8958295185679432, + "learning_rate": 2.129432086533527e-06, + "loss": 0.5344, + "step": 9241 + }, + { + "epoch": 0.3813650243459602, + "grad_norm": 6.374855147809509, + "learning_rate": 2.129250108997252e-06, + "loss": 0.5454, + "step": 9242 + }, + { + "epoch": 0.3814062886853182, + "grad_norm": 2.4538951879831865, + "learning_rate": 2.129068120220996e-06, + "loss": 0.5766, + "step": 9243 + }, + { + "epoch": 0.3814475530246761, + "grad_norm": 5.155562360786885, + "learning_rate": 2.12888612020801e-06, + "loss": 0.5618, + "step": 9244 + }, + { + "epoch": 
0.381488817364034, + "grad_norm": 4.109927853641082, + "learning_rate": 2.1287041089615443e-06, + "loss": 0.5193, + "step": 9245 + }, + { + "epoch": 0.3815300817033919, + "grad_norm": 2.221480281903641, + "learning_rate": 2.12852208648485e-06, + "loss": 0.5351, + "step": 9246 + }, + { + "epoch": 0.38157134604274984, + "grad_norm": 2.981017536410537, + "learning_rate": 2.128340052781179e-06, + "loss": 0.4911, + "step": 9247 + }, + { + "epoch": 0.3816126103821078, + "grad_norm": 2.6425018702328313, + "learning_rate": 2.1281580078537834e-06, + "loss": 0.5417, + "step": 9248 + }, + { + "epoch": 0.3816538747214657, + "grad_norm": 5.734473917141244, + "learning_rate": 2.127975951705914e-06, + "loss": 0.5508, + "step": 9249 + }, + { + "epoch": 0.38169513906082364, + "grad_norm": 5.6687744663435184, + "learning_rate": 2.1277938843408236e-06, + "loss": 0.5783, + "step": 9250 + }, + { + "epoch": 0.38173640340018156, + "grad_norm": 3.050036265455807, + "learning_rate": 2.1276118057617635e-06, + "loss": 0.4948, + "step": 9251 + }, + { + "epoch": 0.3817776677395395, + "grad_norm": 5.103467676590424, + "learning_rate": 2.1274297159719865e-06, + "loss": 0.5803, + "step": 9252 + }, + { + "epoch": 0.38181893207889744, + "grad_norm": 2.155776319337978, + "learning_rate": 2.1272476149747455e-06, + "loss": 0.5177, + "step": 9253 + }, + { + "epoch": 0.38186019641825536, + "grad_norm": 6.180964829190715, + "learning_rate": 2.1270655027732923e-06, + "loss": 0.5255, + "step": 9254 + }, + { + "epoch": 0.3819014607576133, + "grad_norm": 2.878290489370657, + "learning_rate": 2.126883379370881e-06, + "loss": 0.5284, + "step": 9255 + }, + { + "epoch": 0.3819427250969712, + "grad_norm": 2.450872688321426, + "learning_rate": 2.1267012447707646e-06, + "loss": 0.5045, + "step": 9256 + }, + { + "epoch": 0.3819839894363291, + "grad_norm": 6.450380807255017, + "learning_rate": 2.1265190989761956e-06, + "loss": 0.5222, + "step": 9257 + }, + { + "epoch": 0.382025253775687, + "grad_norm": 4.487165292059583, + "learning_rate": 2.1263369419904288e-06, + "loss": 0.5833, + "step": 9258 + }, + { + "epoch": 0.382066518115045, + "grad_norm": 2.9447364764900152, + "learning_rate": 2.1261547738167166e-06, + "loss": 0.5423, + "step": 9259 + }, + { + "epoch": 0.3821077824544029, + "grad_norm": 2.313474758327097, + "learning_rate": 2.1259725944583142e-06, + "loss": 0.5839, + "step": 9260 + }, + { + "epoch": 0.3821490467937608, + "grad_norm": 3.139808271838644, + "learning_rate": 2.125790403918475e-06, + "loss": 0.5398, + "step": 9261 + }, + { + "epoch": 0.38219031113311874, + "grad_norm": 2.212207259962849, + "learning_rate": 2.125608202200454e-06, + "loss": 0.5294, + "step": 9262 + }, + { + "epoch": 0.38223157547247666, + "grad_norm": 3.202335983751284, + "learning_rate": 2.1254259893075057e-06, + "loss": 0.5216, + "step": 9263 + }, + { + "epoch": 0.38227283981183463, + "grad_norm": 3.111745972815698, + "learning_rate": 2.125243765242885e-06, + "loss": 0.515, + "step": 9264 + }, + { + "epoch": 0.38231410415119255, + "grad_norm": 3.2383974549664467, + "learning_rate": 2.1250615300098453e-06, + "loss": 0.501, + "step": 9265 + }, + { + "epoch": 0.38235536849055046, + "grad_norm": 3.7506284389672033, + "learning_rate": 2.124879283611644e-06, + "loss": 0.5139, + "step": 9266 + }, + { + "epoch": 0.3823966328299084, + "grad_norm": 2.233896663026597, + "learning_rate": 2.124697026051535e-06, + "loss": 0.5507, + "step": 9267 + }, + { + "epoch": 0.3824378971692663, + "grad_norm": 7.139222276292758, + "learning_rate": 2.1245147573327745e-06, + 
"loss": 0.4979, + "step": 9268 + }, + { + "epoch": 0.38247916150862427, + "grad_norm": 5.38250819482268, + "learning_rate": 2.124332477458618e-06, + "loss": 0.6067, + "step": 9269 + }, + { + "epoch": 0.3825204258479822, + "grad_norm": 2.145639778196379, + "learning_rate": 2.1241501864323217e-06, + "loss": 0.5529, + "step": 9270 + }, + { + "epoch": 0.3825616901873401, + "grad_norm": 4.993648173088754, + "learning_rate": 2.1239678842571417e-06, + "loss": 0.5441, + "step": 9271 + }, + { + "epoch": 0.382602954526698, + "grad_norm": 2.428067993545109, + "learning_rate": 2.1237855709363342e-06, + "loss": 0.4972, + "step": 9272 + }, + { + "epoch": 0.38264421886605593, + "grad_norm": 7.609485504676934, + "learning_rate": 2.1236032464731564e-06, + "loss": 0.5848, + "step": 9273 + }, + { + "epoch": 0.3826854832054139, + "grad_norm": 4.736296512258362, + "learning_rate": 2.1234209108708643e-06, + "loss": 0.5465, + "step": 9274 + }, + { + "epoch": 0.3827267475447718, + "grad_norm": 2.373687762674338, + "learning_rate": 2.1232385641327152e-06, + "loss": 0.5721, + "step": 9275 + }, + { + "epoch": 0.38276801188412973, + "grad_norm": 3.2755615752367353, + "learning_rate": 2.1230562062619663e-06, + "loss": 0.5938, + "step": 9276 + }, + { + "epoch": 0.38280927622348765, + "grad_norm": 10.349650368782115, + "learning_rate": 2.122873837261875e-06, + "loss": 0.476, + "step": 9277 + }, + { + "epoch": 0.38285054056284556, + "grad_norm": 5.382942317953953, + "learning_rate": 2.1226914571356983e-06, + "loss": 0.4638, + "step": 9278 + }, + { + "epoch": 0.38289180490220354, + "grad_norm": 34.170593861045845, + "learning_rate": 2.1225090658866946e-06, + "loss": 0.5822, + "step": 9279 + }, + { + "epoch": 0.38293306924156145, + "grad_norm": 2.879004185830326, + "learning_rate": 2.1223266635181223e-06, + "loss": 0.5283, + "step": 9280 + }, + { + "epoch": 0.38297433358091937, + "grad_norm": 2.2520629183536762, + "learning_rate": 2.1221442500332386e-06, + "loss": 0.5286, + "step": 9281 + }, + { + "epoch": 0.3830155979202773, + "grad_norm": 35.89686463563407, + "learning_rate": 2.1219618254353018e-06, + "loss": 0.5153, + "step": 9282 + }, + { + "epoch": 0.3830568622596352, + "grad_norm": 5.9816530621715005, + "learning_rate": 2.1217793897275715e-06, + "loss": 0.5979, + "step": 9283 + }, + { + "epoch": 0.38309812659899317, + "grad_norm": 11.968907348156598, + "learning_rate": 2.1215969429133055e-06, + "loss": 0.4837, + "step": 9284 + }, + { + "epoch": 0.3831393909383511, + "grad_norm": 3.7124577764011706, + "learning_rate": 2.121414484995763e-06, + "loss": 0.529, + "step": 9285 + }, + { + "epoch": 0.383180655277709, + "grad_norm": 6.843413848536806, + "learning_rate": 2.1212320159782035e-06, + "loss": 0.5392, + "step": 9286 + }, + { + "epoch": 0.3832219196170669, + "grad_norm": 2.248008371932694, + "learning_rate": 2.1210495358638863e-06, + "loss": 0.5392, + "step": 9287 + }, + { + "epoch": 0.38326318395642484, + "grad_norm": 3.558276775472746, + "learning_rate": 2.12086704465607e-06, + "loss": 0.5711, + "step": 9288 + }, + { + "epoch": 0.3833044482957828, + "grad_norm": 6.676359836747243, + "learning_rate": 2.1206845423580158e-06, + "loss": 0.5594, + "step": 9289 + }, + { + "epoch": 0.3833457126351407, + "grad_norm": 2.8154173110538108, + "learning_rate": 2.1205020289729834e-06, + "loss": 0.5408, + "step": 9290 + }, + { + "epoch": 0.38338697697449864, + "grad_norm": 4.753603378708617, + "learning_rate": 2.120319504504232e-06, + "loss": 0.5697, + "step": 9291 + }, + { + "epoch": 0.38342824131385655, + "grad_norm": 
5.123173973434217, + "learning_rate": 2.120136968955022e-06, + "loss": 0.5288, + "step": 9292 + }, + { + "epoch": 0.38346950565321447, + "grad_norm": 2.8824997964835553, + "learning_rate": 2.1199544223286146e-06, + "loss": 0.5226, + "step": 9293 + }, + { + "epoch": 0.38351076999257244, + "grad_norm": 4.633988580906897, + "learning_rate": 2.11977186462827e-06, + "loss": 0.5477, + "step": 9294 + }, + { + "epoch": 0.38355203433193036, + "grad_norm": 7.6739021702339105, + "learning_rate": 2.11958929585725e-06, + "loss": 0.5847, + "step": 9295 + }, + { + "epoch": 0.3835932986712883, + "grad_norm": 2.055185376574055, + "learning_rate": 2.1194067160188154e-06, + "loss": 0.5156, + "step": 9296 + }, + { + "epoch": 0.3836345630106462, + "grad_norm": 3.088713687827616, + "learning_rate": 2.119224125116227e-06, + "loss": 0.5027, + "step": 9297 + }, + { + "epoch": 0.3836758273500041, + "grad_norm": 3.501082098117421, + "learning_rate": 2.119041523152747e-06, + "loss": 0.4765, + "step": 9298 + }, + { + "epoch": 0.3837170916893621, + "grad_norm": 3.146778414994223, + "learning_rate": 2.1188589101316365e-06, + "loss": 0.5485, + "step": 9299 + }, + { + "epoch": 0.38375835602872, + "grad_norm": 4.307032945793184, + "learning_rate": 2.1186762860561577e-06, + "loss": 0.5683, + "step": 9300 + }, + { + "epoch": 0.3837996203680779, + "grad_norm": 2.4579474944406408, + "learning_rate": 2.1184936509295727e-06, + "loss": 0.4935, + "step": 9301 + }, + { + "epoch": 0.3838408847074358, + "grad_norm": 3.3484350424945517, + "learning_rate": 2.1183110047551436e-06, + "loss": 0.63, + "step": 9302 + }, + { + "epoch": 0.38388214904679374, + "grad_norm": 2.7146004273229267, + "learning_rate": 2.118128347536134e-06, + "loss": 0.609, + "step": 9303 + }, + { + "epoch": 0.3839234133861517, + "grad_norm": 2.2433128777370768, + "learning_rate": 2.1179456792758052e-06, + "loss": 0.4796, + "step": 9304 + }, + { + "epoch": 0.38396467772550963, + "grad_norm": 8.430253639124773, + "learning_rate": 2.117762999977421e-06, + "loss": 0.5074, + "step": 9305 + }, + { + "epoch": 0.38400594206486754, + "grad_norm": 2.13828336703755, + "learning_rate": 2.117580309644244e-06, + "loss": 0.5643, + "step": 9306 + }, + { + "epoch": 0.38404720640422546, + "grad_norm": 2.654287582090554, + "learning_rate": 2.117397608279538e-06, + "loss": 0.5501, + "step": 9307 + }, + { + "epoch": 0.3840884707435834, + "grad_norm": 7.560148848261848, + "learning_rate": 2.1172148958865654e-06, + "loss": 0.5547, + "step": 9308 + }, + { + "epoch": 0.38412973508294135, + "grad_norm": 4.163446094265286, + "learning_rate": 2.1170321724685914e-06, + "loss": 0.5474, + "step": 9309 + }, + { + "epoch": 0.38417099942229926, + "grad_norm": 2.9484996545992175, + "learning_rate": 2.1168494380288794e-06, + "loss": 0.6067, + "step": 9310 + }, + { + "epoch": 0.3842122637616572, + "grad_norm": 4.207025716336314, + "learning_rate": 2.116666692570693e-06, + "loss": 0.5633, + "step": 9311 + }, + { + "epoch": 0.3842535281010151, + "grad_norm": 2.6382491999529547, + "learning_rate": 2.1164839360972967e-06, + "loss": 0.4994, + "step": 9312 + }, + { + "epoch": 0.384294792440373, + "grad_norm": 2.4692924058797483, + "learning_rate": 2.116301168611955e-06, + "loss": 0.4769, + "step": 9313 + }, + { + "epoch": 0.384336056779731, + "grad_norm": 3.574810980465391, + "learning_rate": 2.1161183901179328e-06, + "loss": 0.5278, + "step": 9314 + }, + { + "epoch": 0.3843773211190889, + "grad_norm": 2.6232161924687403, + "learning_rate": 2.1159356006184953e-06, + "loss": 0.5678, + "step": 9315 + }, + { + 
"epoch": 0.3844185854584468, + "grad_norm": 3.5001072171444845, + "learning_rate": 2.1157528001169067e-06, + "loss": 0.5699, + "step": 9316 + }, + { + "epoch": 0.38445984979780473, + "grad_norm": 3.1235948041861596, + "learning_rate": 2.115569988616433e-06, + "loss": 0.5895, + "step": 9317 + }, + { + "epoch": 0.38450111413716265, + "grad_norm": 2.4750695222453487, + "learning_rate": 2.115387166120339e-06, + "loss": 0.485, + "step": 9318 + }, + { + "epoch": 0.38454237847652056, + "grad_norm": 5.154287775342432, + "learning_rate": 2.115204332631891e-06, + "loss": 0.526, + "step": 9319 + }, + { + "epoch": 0.38458364281587853, + "grad_norm": 2.97102160199726, + "learning_rate": 2.1150214881543545e-06, + "loss": 0.5789, + "step": 9320 + }, + { + "epoch": 0.38462490715523645, + "grad_norm": 3.7551896418667883, + "learning_rate": 2.1148386326909955e-06, + "loss": 0.5049, + "step": 9321 + }, + { + "epoch": 0.38466617149459437, + "grad_norm": 6.362332499302421, + "learning_rate": 2.1146557662450806e-06, + "loss": 0.5125, + "step": 9322 + }, + { + "epoch": 0.3847074358339523, + "grad_norm": 9.92643369561087, + "learning_rate": 2.114472888819877e-06, + "loss": 0.4852, + "step": 9323 + }, + { + "epoch": 0.3847487001733102, + "grad_norm": 2.1134435688538513, + "learning_rate": 2.1142900004186494e-06, + "loss": 0.5486, + "step": 9324 + }, + { + "epoch": 0.38478996451266817, + "grad_norm": 5.565099078736217, + "learning_rate": 2.1141071010446658e-06, + "loss": 0.5493, + "step": 9325 + }, + { + "epoch": 0.3848312288520261, + "grad_norm": 4.952874492310456, + "learning_rate": 2.113924190701193e-06, + "loss": 0.5185, + "step": 9326 + }, + { + "epoch": 0.384872493191384, + "grad_norm": 4.095797009464278, + "learning_rate": 2.113741269391498e-06, + "loss": 0.5456, + "step": 9327 + }, + { + "epoch": 0.3849137575307419, + "grad_norm": 3.8843886275030224, + "learning_rate": 2.1135583371188492e-06, + "loss": 0.5744, + "step": 9328 + }, + { + "epoch": 0.38495502187009983, + "grad_norm": 5.3323697264340835, + "learning_rate": 2.1133753938865135e-06, + "loss": 0.5087, + "step": 9329 + }, + { + "epoch": 0.3849962862094578, + "grad_norm": 14.640370690350407, + "learning_rate": 2.1131924396977596e-06, + "loss": 0.5287, + "step": 9330 + }, + { + "epoch": 0.3850375505488157, + "grad_norm": 3.347321436716296, + "learning_rate": 2.1130094745558537e-06, + "loss": 0.5465, + "step": 9331 + }, + { + "epoch": 0.38507881488817364, + "grad_norm": 2.098901022573146, + "learning_rate": 2.1128264984640653e-06, + "loss": 0.5291, + "step": 9332 + }, + { + "epoch": 0.38512007922753155, + "grad_norm": 3.651088636514343, + "learning_rate": 2.1126435114256627e-06, + "loss": 0.549, + "step": 9333 + }, + { + "epoch": 0.38516134356688947, + "grad_norm": 3.141161727610149, + "learning_rate": 2.112460513443914e-06, + "loss": 0.5252, + "step": 9334 + }, + { + "epoch": 0.38520260790624744, + "grad_norm": 7.91306151610609, + "learning_rate": 2.112277504522089e-06, + "loss": 0.531, + "step": 9335 + }, + { + "epoch": 0.38524387224560536, + "grad_norm": 1.8891512972347237, + "learning_rate": 2.1120944846634563e-06, + "loss": 0.5004, + "step": 9336 + }, + { + "epoch": 0.3852851365849633, + "grad_norm": 2.7719671086196573, + "learning_rate": 2.111911453871284e-06, + "loss": 0.494, + "step": 9337 + }, + { + "epoch": 0.3853264009243212, + "grad_norm": 2.363397537949951, + "learning_rate": 2.111728412148843e-06, + "loss": 0.5151, + "step": 9338 + }, + { + "epoch": 0.3853676652636791, + "grad_norm": 2.099251188670098, + "learning_rate": 
2.1115453594994017e-06, + "loss": 0.5385, + "step": 9339 + }, + { + "epoch": 0.3854089296030371, + "grad_norm": 3.883872891787967, + "learning_rate": 2.111362295926231e-06, + "loss": 0.5728, + "step": 9340 + }, + { + "epoch": 0.385450193942395, + "grad_norm": 3.671289825503061, + "learning_rate": 2.1111792214325998e-06, + "loss": 0.4428, + "step": 9341 + }, + { + "epoch": 0.3854914582817529, + "grad_norm": 2.939064914484261, + "learning_rate": 2.110996136021779e-06, + "loss": 0.4771, + "step": 9342 + }, + { + "epoch": 0.3855327226211108, + "grad_norm": 2.549527731393649, + "learning_rate": 2.110813039697039e-06, + "loss": 0.5278, + "step": 9343 + }, + { + "epoch": 0.38557398696046874, + "grad_norm": 8.668146364423967, + "learning_rate": 2.1106299324616502e-06, + "loss": 0.5348, + "step": 9344 + }, + { + "epoch": 0.3856152512998267, + "grad_norm": 5.416768398515109, + "learning_rate": 2.110446814318882e-06, + "loss": 0.5725, + "step": 9345 + }, + { + "epoch": 0.3856565156391846, + "grad_norm": 4.078445761374712, + "learning_rate": 2.110263685272008e-06, + "loss": 0.5118, + "step": 9346 + }, + { + "epoch": 0.38569777997854254, + "grad_norm": 2.992202124422457, + "learning_rate": 2.110080545324297e-06, + "loss": 0.5807, + "step": 9347 + }, + { + "epoch": 0.38573904431790046, + "grad_norm": 2.4864703590520736, + "learning_rate": 2.1098973944790217e-06, + "loss": 0.5018, + "step": 9348 + }, + { + "epoch": 0.3857803086572584, + "grad_norm": 3.6111394209209196, + "learning_rate": 2.1097142327394537e-06, + "loss": 0.5632, + "step": 9349 + }, + { + "epoch": 0.38582157299661635, + "grad_norm": 2.9504029380616554, + "learning_rate": 2.1095310601088635e-06, + "loss": 0.5874, + "step": 9350 + }, + { + "epoch": 0.38586283733597426, + "grad_norm": 2.7956995897519077, + "learning_rate": 2.109347876590524e-06, + "loss": 0.5165, + "step": 9351 + }, + { + "epoch": 0.3859041016753322, + "grad_norm": 8.588508924941436, + "learning_rate": 2.109164682187707e-06, + "loss": 0.5518, + "step": 9352 + }, + { + "epoch": 0.3859453660146901, + "grad_norm": 4.298274581717108, + "learning_rate": 2.108981476903685e-06, + "loss": 0.4808, + "step": 9353 + }, + { + "epoch": 0.385986630354048, + "grad_norm": 3.0918166216526366, + "learning_rate": 2.1087982607417303e-06, + "loss": 0.5378, + "step": 9354 + }, + { + "epoch": 0.386027894693406, + "grad_norm": 4.010578855685417, + "learning_rate": 2.1086150337051154e-06, + "loss": 0.5272, + "step": 9355 + }, + { + "epoch": 0.3860691590327639, + "grad_norm": 2.6084068388259, + "learning_rate": 2.1084317957971137e-06, + "loss": 0.5327, + "step": 9356 + }, + { + "epoch": 0.3861104233721218, + "grad_norm": 3.2920713432926747, + "learning_rate": 2.1082485470209984e-06, + "loss": 0.5269, + "step": 9357 + }, + { + "epoch": 0.38615168771147973, + "grad_norm": 5.795971999686396, + "learning_rate": 2.108065287380042e-06, + "loss": 0.5099, + "step": 9358 + }, + { + "epoch": 0.38619295205083765, + "grad_norm": 3.2483169631188122, + "learning_rate": 2.1078820168775187e-06, + "loss": 0.5017, + "step": 9359 + }, + { + "epoch": 0.3862342163901956, + "grad_norm": 3.610402742033292, + "learning_rate": 2.107698735516702e-06, + "loss": 0.5384, + "step": 9360 + }, + { + "epoch": 0.38627548072955353, + "grad_norm": 9.146903359000104, + "learning_rate": 2.107515443300865e-06, + "loss": 0.506, + "step": 9361 + }, + { + "epoch": 0.38631674506891145, + "grad_norm": 2.418924173300791, + "learning_rate": 2.107332140233283e-06, + "loss": 0.5146, + "step": 9362 + }, + { + "epoch": 0.38635800940826937, + 
"grad_norm": 3.0543817592242783, + "learning_rate": 2.1071488263172295e-06, + "loss": 0.5597, + "step": 9363 + }, + { + "epoch": 0.3863992737476273, + "grad_norm": 4.757457293184757, + "learning_rate": 2.106965501555979e-06, + "loss": 0.512, + "step": 9364 + }, + { + "epoch": 0.38644053808698525, + "grad_norm": 4.142794689706001, + "learning_rate": 2.1067821659528065e-06, + "loss": 0.4788, + "step": 9365 + }, + { + "epoch": 0.38648180242634317, + "grad_norm": 3.0962948188037247, + "learning_rate": 2.106598819510986e-06, + "loss": 0.5214, + "step": 9366 + }, + { + "epoch": 0.3865230667657011, + "grad_norm": 2.437197934502495, + "learning_rate": 2.106415462233794e-06, + "loss": 0.4748, + "step": 9367 + }, + { + "epoch": 0.386564331105059, + "grad_norm": 4.370597718872074, + "learning_rate": 2.106232094124504e-06, + "loss": 0.5681, + "step": 9368 + }, + { + "epoch": 0.3866055954444169, + "grad_norm": 3.296077490815802, + "learning_rate": 2.106048715186393e-06, + "loss": 0.5439, + "step": 9369 + }, + { + "epoch": 0.3866468597837749, + "grad_norm": 3.020714218849935, + "learning_rate": 2.1058653254227354e-06, + "loss": 0.5522, + "step": 9370 + }, + { + "epoch": 0.3866881241231328, + "grad_norm": 4.256462315763774, + "learning_rate": 2.1056819248368077e-06, + "loss": 0.5001, + "step": 9371 + }, + { + "epoch": 0.3867293884624907, + "grad_norm": 2.6815708737628428, + "learning_rate": 2.105498513431885e-06, + "loss": 0.5543, + "step": 9372 + }, + { + "epoch": 0.38677065280184864, + "grad_norm": 8.562116445139104, + "learning_rate": 2.1053150912112447e-06, + "loss": 0.5489, + "step": 9373 + }, + { + "epoch": 0.38681191714120655, + "grad_norm": 1.880142188936849, + "learning_rate": 2.105131658178162e-06, + "loss": 0.5049, + "step": 9374 + }, + { + "epoch": 0.3868531814805645, + "grad_norm": 2.9825025826243836, + "learning_rate": 2.104948214335915e-06, + "loss": 0.5024, + "step": 9375 + }, + { + "epoch": 0.38689444581992244, + "grad_norm": 6.993154238345526, + "learning_rate": 2.1047647596877792e-06, + "loss": 0.5471, + "step": 9376 + }, + { + "epoch": 0.38693571015928035, + "grad_norm": 3.010434769240742, + "learning_rate": 2.1045812942370323e-06, + "loss": 0.5621, + "step": 9377 + }, + { + "epoch": 0.38697697449863827, + "grad_norm": 2.8635975152181987, + "learning_rate": 2.10439781798695e-06, + "loss": 0.5245, + "step": 9378 + }, + { + "epoch": 0.3870182388379962, + "grad_norm": 7.471991342735606, + "learning_rate": 2.1042143309408116e-06, + "loss": 0.5354, + "step": 9379 + }, + { + "epoch": 0.3870595031773541, + "grad_norm": 2.6228247331682293, + "learning_rate": 2.104030833101893e-06, + "loss": 0.491, + "step": 9380 + }, + { + "epoch": 0.3871007675167121, + "grad_norm": 2.9632306960091133, + "learning_rate": 2.103847324473473e-06, + "loss": 0.5445, + "step": 9381 + }, + { + "epoch": 0.38714203185607, + "grad_norm": 2.4924146446011806, + "learning_rate": 2.103663805058829e-06, + "loss": 0.612, + "step": 9382 + }, + { + "epoch": 0.3871832961954279, + "grad_norm": 4.429349524244958, + "learning_rate": 2.1034802748612403e-06, + "loss": 0.5579, + "step": 9383 + }, + { + "epoch": 0.3872245605347858, + "grad_norm": 3.094618498699564, + "learning_rate": 2.1032967338839832e-06, + "loss": 0.5202, + "step": 9384 + }, + { + "epoch": 0.38726582487414374, + "grad_norm": 10.675046289272297, + "learning_rate": 2.1031131821303376e-06, + "loss": 0.5657, + "step": 9385 + }, + { + "epoch": 0.3873070892135017, + "grad_norm": 2.327918690184169, + "learning_rate": 2.1029296196035815e-06, + "loss": 0.5275, + "step": 
9386 + }, + { + "epoch": 0.3873483535528596, + "grad_norm": 12.703587942473002, + "learning_rate": 2.102746046306994e-06, + "loss": 0.5713, + "step": 9387 + }, + { + "epoch": 0.38738961789221754, + "grad_norm": 2.927716112525178, + "learning_rate": 2.1025624622438544e-06, + "loss": 0.4792, + "step": 9388 + }, + { + "epoch": 0.38743088223157546, + "grad_norm": 3.772227529452666, + "learning_rate": 2.1023788674174425e-06, + "loss": 0.5458, + "step": 9389 + }, + { + "epoch": 0.3874721465709334, + "grad_norm": 6.237883903179138, + "learning_rate": 2.1021952618310363e-06, + "loss": 0.5972, + "step": 9390 + }, + { + "epoch": 0.38751341091029134, + "grad_norm": 3.09789883784978, + "learning_rate": 2.1020116454879165e-06, + "loss": 0.5796, + "step": 9391 + }, + { + "epoch": 0.38755467524964926, + "grad_norm": 3.2623522376291567, + "learning_rate": 2.1018280183913632e-06, + "loss": 0.5891, + "step": 9392 + }, + { + "epoch": 0.3875959395890072, + "grad_norm": 7.448737970524887, + "learning_rate": 2.1016443805446553e-06, + "loss": 0.5187, + "step": 9393 + }, + { + "epoch": 0.3876372039283651, + "grad_norm": 2.6428987372740194, + "learning_rate": 2.101460731951074e-06, + "loss": 0.4794, + "step": 9394 + }, + { + "epoch": 0.387678468267723, + "grad_norm": 4.601350023976499, + "learning_rate": 2.1012770726138994e-06, + "loss": 0.5348, + "step": 9395 + }, + { + "epoch": 0.387719732607081, + "grad_norm": 2.5130940237417447, + "learning_rate": 2.101093402536412e-06, + "loss": 0.5232, + "step": 9396 + }, + { + "epoch": 0.3877609969464389, + "grad_norm": 3.2698189578428014, + "learning_rate": 2.100909721721893e-06, + "loss": 0.508, + "step": 9397 + }, + { + "epoch": 0.3878022612857968, + "grad_norm": 2.9083677932364878, + "learning_rate": 2.1007260301736225e-06, + "loss": 0.5316, + "step": 9398 + }, + { + "epoch": 0.38784352562515473, + "grad_norm": 10.112907667431303, + "learning_rate": 2.1005423278948826e-06, + "loss": 0.5762, + "step": 9399 + }, + { + "epoch": 0.38788478996451264, + "grad_norm": 7.73102348548224, + "learning_rate": 2.1003586148889546e-06, + "loss": 0.5835, + "step": 9400 + }, + { + "epoch": 0.3879260543038706, + "grad_norm": 2.461871986650167, + "learning_rate": 2.1001748911591193e-06, + "loss": 0.5703, + "step": 9401 + }, + { + "epoch": 0.38796731864322853, + "grad_norm": 2.3680377869529368, + "learning_rate": 2.09999115670866e-06, + "loss": 0.531, + "step": 9402 + }, + { + "epoch": 0.38800858298258645, + "grad_norm": 5.306673848616273, + "learning_rate": 2.099807411540857e-06, + "loss": 0.5451, + "step": 9403 + }, + { + "epoch": 0.38804984732194436, + "grad_norm": 14.003009479817456, + "learning_rate": 2.0996236556589935e-06, + "loss": 0.5334, + "step": 9404 + }, + { + "epoch": 0.3880911116613023, + "grad_norm": 3.1873950393529915, + "learning_rate": 2.0994398890663514e-06, + "loss": 0.5354, + "step": 9405 + }, + { + "epoch": 0.38813237600066025, + "grad_norm": 3.5799396081071144, + "learning_rate": 2.099256111766213e-06, + "loss": 0.5813, + "step": 9406 + }, + { + "epoch": 0.38817364034001817, + "grad_norm": 2.322180747936153, + "learning_rate": 2.0990723237618613e-06, + "loss": 0.5471, + "step": 9407 + }, + { + "epoch": 0.3882149046793761, + "grad_norm": 6.4414665083646065, + "learning_rate": 2.09888852505658e-06, + "loss": 0.5045, + "step": 9408 + }, + { + "epoch": 0.388256169018734, + "grad_norm": 3.119452190289549, + "learning_rate": 2.098704715653651e-06, + "loss": 0.6047, + "step": 9409 + }, + { + "epoch": 0.3882974333580919, + "grad_norm": 3.2637650189182565, + "learning_rate": 
2.098520895556358e-06, + "loss": 0.5185, + "step": 9410 + }, + { + "epoch": 0.3883386976974499, + "grad_norm": 2.7872869163446268, + "learning_rate": 2.098337064767984e-06, + "loss": 0.501, + "step": 9411 + }, + { + "epoch": 0.3883799620368078, + "grad_norm": 3.8855699855788206, + "learning_rate": 2.098153223291814e-06, + "loss": 0.5488, + "step": 9412 + }, + { + "epoch": 0.3884212263761657, + "grad_norm": 11.141710515503513, + "learning_rate": 2.097969371131131e-06, + "loss": 0.5265, + "step": 9413 + }, + { + "epoch": 0.38846249071552363, + "grad_norm": 12.23743505451123, + "learning_rate": 2.097785508289219e-06, + "loss": 0.5191, + "step": 9414 + }, + { + "epoch": 0.38850375505488155, + "grad_norm": 2.3005105402023203, + "learning_rate": 2.0976016347693624e-06, + "loss": 0.5461, + "step": 9415 + }, + { + "epoch": 0.3885450193942395, + "grad_norm": 2.8865116817855045, + "learning_rate": 2.0974177505748455e-06, + "loss": 0.5873, + "step": 9416 + }, + { + "epoch": 0.38858628373359744, + "grad_norm": 3.3151109447190428, + "learning_rate": 2.097233855708953e-06, + "loss": 0.5383, + "step": 9417 + }, + { + "epoch": 0.38862754807295535, + "grad_norm": 7.555599146573212, + "learning_rate": 2.09704995017497e-06, + "loss": 0.591, + "step": 9418 + }, + { + "epoch": 0.38866881241231327, + "grad_norm": 7.847833915302339, + "learning_rate": 2.0968660339761815e-06, + "loss": 0.5575, + "step": 9419 + }, + { + "epoch": 0.3887100767516712, + "grad_norm": 3.919037896817075, + "learning_rate": 2.0966821071158717e-06, + "loss": 0.5094, + "step": 9420 + }, + { + "epoch": 0.38875134109102916, + "grad_norm": 2.286570323819945, + "learning_rate": 2.0964981695973274e-06, + "loss": 0.5032, + "step": 9421 + }, + { + "epoch": 0.3887926054303871, + "grad_norm": 2.9334391430629894, + "learning_rate": 2.096314221423834e-06, + "loss": 0.5805, + "step": 9422 + }, + { + "epoch": 0.388833869769745, + "grad_norm": 2.320043684616928, + "learning_rate": 2.0961302625986765e-06, + "loss": 0.5411, + "step": 9423 + }, + { + "epoch": 0.3888751341091029, + "grad_norm": 2.5840274578418754, + "learning_rate": 2.095946293125141e-06, + "loss": 0.5617, + "step": 9424 + }, + { + "epoch": 0.3889163984484608, + "grad_norm": 2.491439487702812, + "learning_rate": 2.0957623130065136e-06, + "loss": 0.58, + "step": 9425 + }, + { + "epoch": 0.3889576627878188, + "grad_norm": 51.603263719164076, + "learning_rate": 2.095578322246082e-06, + "loss": 0.5246, + "step": 9426 + }, + { + "epoch": 0.3889989271271767, + "grad_norm": 2.222941428850435, + "learning_rate": 2.0953943208471304e-06, + "loss": 0.6051, + "step": 9427 + }, + { + "epoch": 0.3890401914665346, + "grad_norm": 5.060979308251411, + "learning_rate": 2.0952103088129475e-06, + "loss": 0.5057, + "step": 9428 + }, + { + "epoch": 0.38908145580589254, + "grad_norm": 4.971429836110945, + "learning_rate": 2.0950262861468193e-06, + "loss": 0.4705, + "step": 9429 + }, + { + "epoch": 0.38912272014525046, + "grad_norm": 4.65072281718811, + "learning_rate": 2.094842252852033e-06, + "loss": 0.5247, + "step": 9430 + }, + { + "epoch": 0.3891639844846084, + "grad_norm": 3.766647462526107, + "learning_rate": 2.0946582089318765e-06, + "loss": 0.5331, + "step": 9431 + }, + { + "epoch": 0.38920524882396634, + "grad_norm": 5.240036428919425, + "learning_rate": 2.0944741543896365e-06, + "loss": 0.5455, + "step": 9432 + }, + { + "epoch": 0.38924651316332426, + "grad_norm": 3.9730038114374224, + "learning_rate": 2.0942900892286006e-06, + "loss": 0.5453, + "step": 9433 + }, + { + "epoch": 0.3892877775026822, + 
"grad_norm": 3.373807537890572, + "learning_rate": 2.0941060134520573e-06, + "loss": 0.5118, + "step": 9434 + }, + { + "epoch": 0.3893290418420401, + "grad_norm": 3.4510752980606, + "learning_rate": 2.0939219270632945e-06, + "loss": 0.595, + "step": 9435 + }, + { + "epoch": 0.38937030618139806, + "grad_norm": 2.441108019368014, + "learning_rate": 2.0937378300656e-06, + "loss": 0.5329, + "step": 9436 + }, + { + "epoch": 0.389411570520756, + "grad_norm": 11.45523771079808, + "learning_rate": 2.093553722462263e-06, + "loss": 0.5119, + "step": 9437 + }, + { + "epoch": 0.3894528348601139, + "grad_norm": 3.2776988737541504, + "learning_rate": 2.093369604256571e-06, + "loss": 0.5344, + "step": 9438 + }, + { + "epoch": 0.3894940991994718, + "grad_norm": 3.2400136901379475, + "learning_rate": 2.0931854754518142e-06, + "loss": 0.5185, + "step": 9439 + }, + { + "epoch": 0.3895353635388297, + "grad_norm": 8.163591101548066, + "learning_rate": 2.09300133605128e-06, + "loss": 0.5577, + "step": 9440 + }, + { + "epoch": 0.3895766278781877, + "grad_norm": 4.692969761522901, + "learning_rate": 2.0928171860582598e-06, + "loss": 0.5496, + "step": 9441 + }, + { + "epoch": 0.3896178922175456, + "grad_norm": 3.9005554183389135, + "learning_rate": 2.0926330254760405e-06, + "loss": 0.5835, + "step": 9442 + }, + { + "epoch": 0.38965915655690353, + "grad_norm": 1.8197397340529498, + "learning_rate": 2.0924488543079135e-06, + "loss": 0.4985, + "step": 9443 + }, + { + "epoch": 0.38970042089626145, + "grad_norm": 1.819532822610733, + "learning_rate": 2.0922646725571673e-06, + "loss": 0.4847, + "step": 9444 + }, + { + "epoch": 0.38974168523561936, + "grad_norm": 4.173448136597512, + "learning_rate": 2.092080480227093e-06, + "loss": 0.563, + "step": 9445 + }, + { + "epoch": 0.3897829495749773, + "grad_norm": 2.2829787501679366, + "learning_rate": 2.09189627732098e-06, + "loss": 0.5717, + "step": 9446 + }, + { + "epoch": 0.38982421391433525, + "grad_norm": 4.204136985422711, + "learning_rate": 2.091712063842119e-06, + "loss": 0.5586, + "step": 9447 + }, + { + "epoch": 0.38986547825369317, + "grad_norm": 2.346905573110743, + "learning_rate": 2.0915278397938e-06, + "loss": 0.5078, + "step": 9448 + }, + { + "epoch": 0.3899067425930511, + "grad_norm": 3.1903594046629995, + "learning_rate": 2.091343605179314e-06, + "loss": 0.515, + "step": 9449 + }, + { + "epoch": 0.389948006932409, + "grad_norm": 2.187531610230457, + "learning_rate": 2.0911593600019515e-06, + "loss": 0.4756, + "step": 9450 + }, + { + "epoch": 0.3899892712717669, + "grad_norm": 3.0895093397697333, + "learning_rate": 2.0909751042650047e-06, + "loss": 0.5209, + "step": 9451 + }, + { + "epoch": 0.3900305356111249, + "grad_norm": 5.55751186197762, + "learning_rate": 2.0907908379717637e-06, + "loss": 0.5608, + "step": 9452 + }, + { + "epoch": 0.3900717999504828, + "grad_norm": 2.242990103255516, + "learning_rate": 2.0906065611255205e-06, + "loss": 0.5511, + "step": 9453 + }, + { + "epoch": 0.3901130642898407, + "grad_norm": 2.4821033057011737, + "learning_rate": 2.0904222737295666e-06, + "loss": 0.5379, + "step": 9454 + }, + { + "epoch": 0.39015432862919863, + "grad_norm": 71.58600775699392, + "learning_rate": 2.0902379757871944e-06, + "loss": 0.5454, + "step": 9455 + }, + { + "epoch": 0.39019559296855655, + "grad_norm": 3.681211342379979, + "learning_rate": 2.0900536673016944e-06, + "loss": 0.5683, + "step": 9456 + }, + { + "epoch": 0.3902368573079145, + "grad_norm": 4.678320891177989, + "learning_rate": 2.0898693482763605e-06, + "loss": 0.5317, + "step": 9457 + 
}, + { + "epoch": 0.39027812164727244, + "grad_norm": 3.7045146878701125, + "learning_rate": 2.0896850187144843e-06, + "loss": 0.5722, + "step": 9458 + }, + { + "epoch": 0.39031938598663035, + "grad_norm": 15.01938009239126, + "learning_rate": 2.0895006786193583e-06, + "loss": 0.493, + "step": 9459 + }, + { + "epoch": 0.39036065032598827, + "grad_norm": 4.669625193604469, + "learning_rate": 2.0893163279942757e-06, + "loss": 0.5613, + "step": 9460 + }, + { + "epoch": 0.3904019146653462, + "grad_norm": 5.198413273988977, + "learning_rate": 2.0891319668425294e-06, + "loss": 0.5095, + "step": 9461 + }, + { + "epoch": 0.39044317900470416, + "grad_norm": 3.5762496372427353, + "learning_rate": 2.0889475951674122e-06, + "loss": 0.5014, + "step": 9462 + }, + { + "epoch": 0.39048444334406207, + "grad_norm": 2.377307918334369, + "learning_rate": 2.088763212972218e-06, + "loss": 0.5702, + "step": 9463 + }, + { + "epoch": 0.39052570768342, + "grad_norm": 5.573294250405593, + "learning_rate": 2.088578820260239e-06, + "loss": 0.5596, + "step": 9464 + }, + { + "epoch": 0.3905669720227779, + "grad_norm": 6.600937098937394, + "learning_rate": 2.088394417034771e-06, + "loss": 0.5303, + "step": 9465 + }, + { + "epoch": 0.3906082363621358, + "grad_norm": 3.6376608398405197, + "learning_rate": 2.0882100032991057e-06, + "loss": 0.5508, + "step": 9466 + }, + { + "epoch": 0.3906495007014938, + "grad_norm": 3.140227342805686, + "learning_rate": 2.088025579056539e-06, + "loss": 0.5704, + "step": 9467 + }, + { + "epoch": 0.3906907650408517, + "grad_norm": 1.972077218144688, + "learning_rate": 2.0878411443103638e-06, + "loss": 0.4916, + "step": 9468 + }, + { + "epoch": 0.3907320293802096, + "grad_norm": 2.35527155650963, + "learning_rate": 2.087656699063876e-06, + "loss": 0.4663, + "step": 9469 + }, + { + "epoch": 0.39077329371956754, + "grad_norm": 2.2568282507474144, + "learning_rate": 2.087472243320369e-06, + "loss": 0.5483, + "step": 9470 + }, + { + "epoch": 0.39081455805892545, + "grad_norm": 3.5554857256009713, + "learning_rate": 2.0872877770831383e-06, + "loss": 0.4866, + "step": 9471 + }, + { + "epoch": 0.3908558223982834, + "grad_norm": 5.306243597012056, + "learning_rate": 2.0871033003554786e-06, + "loss": 0.5191, + "step": 9472 + }, + { + "epoch": 0.39089708673764134, + "grad_norm": 9.571338683575515, + "learning_rate": 2.0869188131406852e-06, + "loss": 0.5112, + "step": 9473 + }, + { + "epoch": 0.39093835107699926, + "grad_norm": 4.356439976461362, + "learning_rate": 2.086734315442054e-06, + "loss": 0.552, + "step": 9474 + }, + { + "epoch": 0.3909796154163572, + "grad_norm": 13.381387292589471, + "learning_rate": 2.0865498072628794e-06, + "loss": 0.5604, + "step": 9475 + }, + { + "epoch": 0.3910208797557151, + "grad_norm": 5.743090836632797, + "learning_rate": 2.0863652886064586e-06, + "loss": 0.47, + "step": 9476 + }, + { + "epoch": 0.39106214409507306, + "grad_norm": 2.831865096550002, + "learning_rate": 2.0861807594760865e-06, + "loss": 0.5755, + "step": 9477 + }, + { + "epoch": 0.391103408434431, + "grad_norm": 4.035043703348379, + "learning_rate": 2.0859962198750595e-06, + "loss": 0.5622, + "step": 9478 + }, + { + "epoch": 0.3911446727737889, + "grad_norm": 8.455768771588026, + "learning_rate": 2.0858116698066742e-06, + "loss": 0.6158, + "step": 9479 + }, + { + "epoch": 0.3911859371131468, + "grad_norm": 2.716794374186173, + "learning_rate": 2.0856271092742273e-06, + "loss": 0.5655, + "step": 9480 + }, + { + "epoch": 0.3912272014525047, + "grad_norm": 5.314016546579499, + "learning_rate": 
2.0854425382810154e-06, + "loss": 0.5276, + "step": 9481 + }, + { + "epoch": 0.3912684657918627, + "grad_norm": 3.1611016752598964, + "learning_rate": 2.0852579568303347e-06, + "loss": 0.5118, + "step": 9482 + }, + { + "epoch": 0.3913097301312206, + "grad_norm": 2.8586343245394086, + "learning_rate": 2.0850733649254827e-06, + "loss": 0.5324, + "step": 9483 + }, + { + "epoch": 0.39135099447057853, + "grad_norm": 2.323717680978935, + "learning_rate": 2.084888762569757e-06, + "loss": 0.5691, + "step": 9484 + }, + { + "epoch": 0.39139225880993644, + "grad_norm": 3.314841621773769, + "learning_rate": 2.0847041497664545e-06, + "loss": 0.5541, + "step": 9485 + }, + { + "epoch": 0.39143352314929436, + "grad_norm": 1.8694886590601691, + "learning_rate": 2.0845195265188738e-06, + "loss": 0.4949, + "step": 9486 + }, + { + "epoch": 0.39147478748865233, + "grad_norm": 4.9360140252954805, + "learning_rate": 2.0843348928303117e-06, + "loss": 0.4792, + "step": 9487 + }, + { + "epoch": 0.39151605182801025, + "grad_norm": 3.298610922447878, + "learning_rate": 2.084150248704067e-06, + "loss": 0.534, + "step": 9488 + }, + { + "epoch": 0.39155731616736816, + "grad_norm": 1.9450143740108579, + "learning_rate": 2.083965594143437e-06, + "loss": 0.4649, + "step": 9489 + }, + { + "epoch": 0.3915985805067261, + "grad_norm": 2.0128825213827732, + "learning_rate": 2.083780929151721e-06, + "loss": 0.5549, + "step": 9490 + }, + { + "epoch": 0.391639844846084, + "grad_norm": 21.138919444144985, + "learning_rate": 2.083596253732217e-06, + "loss": 0.5342, + "step": 9491 + }, + { + "epoch": 0.39168110918544197, + "grad_norm": 29.158672768325328, + "learning_rate": 2.0834115678882245e-06, + "loss": 0.5677, + "step": 9492 + }, + { + "epoch": 0.3917223735247999, + "grad_norm": 3.3742224919093107, + "learning_rate": 2.083226871623041e-06, + "loss": 0.4977, + "step": 9493 + }, + { + "epoch": 0.3917636378641578, + "grad_norm": 5.860702884550497, + "learning_rate": 2.0830421649399676e-06, + "loss": 0.5702, + "step": 9494 + }, + { + "epoch": 0.3918049022035157, + "grad_norm": 2.3971750031002297, + "learning_rate": 2.082857447842302e-06, + "loss": 0.5828, + "step": 9495 + }, + { + "epoch": 0.39184616654287363, + "grad_norm": 3.8527852114695627, + "learning_rate": 2.082672720333344e-06, + "loss": 0.5665, + "step": 9496 + }, + { + "epoch": 0.3918874308822316, + "grad_norm": 7.750965344500329, + "learning_rate": 2.0824879824163944e-06, + "loss": 0.6206, + "step": 9497 + }, + { + "epoch": 0.3919286952215895, + "grad_norm": 4.211507041459311, + "learning_rate": 2.0823032340947517e-06, + "loss": 0.5454, + "step": 9498 + }, + { + "epoch": 0.39196995956094743, + "grad_norm": 4.647908803956429, + "learning_rate": 2.0821184753717163e-06, + "loss": 0.5572, + "step": 9499 + }, + { + "epoch": 0.39201122390030535, + "grad_norm": 27.844727175201314, + "learning_rate": 2.081933706250589e-06, + "loss": 0.5493, + "step": 9500 + }, + { + "epoch": 0.39205248823966327, + "grad_norm": 2.892949291207968, + "learning_rate": 2.0817489267346704e-06, + "loss": 0.5343, + "step": 9501 + }, + { + "epoch": 0.39209375257902124, + "grad_norm": 2.1011260783598087, + "learning_rate": 2.0815641368272603e-06, + "loss": 0.5351, + "step": 9502 + }, + { + "epoch": 0.39213501691837915, + "grad_norm": 2.7901262025032056, + "learning_rate": 2.0813793365316594e-06, + "loss": 0.5357, + "step": 9503 + }, + { + "epoch": 0.39217628125773707, + "grad_norm": 3.927882906590825, + "learning_rate": 2.0811945258511703e-06, + "loss": 0.5635, + "step": 9504 + }, + { + "epoch": 
0.392217545597095, + "grad_norm": 21.250213376385855, + "learning_rate": 2.0810097047890918e-06, + "loss": 0.4811, + "step": 9505 + }, + { + "epoch": 0.3922588099364529, + "grad_norm": 3.7468267749844104, + "learning_rate": 2.0808248733487272e-06, + "loss": 0.5517, + "step": 9506 + }, + { + "epoch": 0.3923000742758108, + "grad_norm": 2.940416439232557, + "learning_rate": 2.080640031533377e-06, + "loss": 0.5985, + "step": 9507 + }, + { + "epoch": 0.3923413386151688, + "grad_norm": 3.6716100076216356, + "learning_rate": 2.0804551793463437e-06, + "loss": 0.5549, + "step": 9508 + }, + { + "epoch": 0.3923826029545267, + "grad_norm": 3.5665973794228196, + "learning_rate": 2.0802703167909286e-06, + "loss": 0.5256, + "step": 9509 + }, + { + "epoch": 0.3924238672938846, + "grad_norm": 2.3818105072892344, + "learning_rate": 2.0800854438704344e-06, + "loss": 0.5679, + "step": 9510 + }, + { + "epoch": 0.39246513163324254, + "grad_norm": 3.8001910939368253, + "learning_rate": 2.0799005605881624e-06, + "loss": 0.5892, + "step": 9511 + }, + { + "epoch": 0.39250639597260045, + "grad_norm": 2.324241065408148, + "learning_rate": 2.0797156669474157e-06, + "loss": 0.4965, + "step": 9512 + }, + { + "epoch": 0.3925476603119584, + "grad_norm": 3.035062242843223, + "learning_rate": 2.0795307629514973e-06, + "loss": 0.5575, + "step": 9513 + }, + { + "epoch": 0.39258892465131634, + "grad_norm": 2.8664893578147064, + "learning_rate": 2.0793458486037096e-06, + "loss": 0.478, + "step": 9514 + }, + { + "epoch": 0.39263018899067426, + "grad_norm": 2.5242271868531683, + "learning_rate": 2.079160923907356e-06, + "loss": 0.5416, + "step": 9515 + }, + { + "epoch": 0.39267145333003217, + "grad_norm": 2.4929256474229704, + "learning_rate": 2.078975988865739e-06, + "loss": 0.5446, + "step": 9516 + }, + { + "epoch": 0.3927127176693901, + "grad_norm": 5.093519643995561, + "learning_rate": 2.078791043482163e-06, + "loss": 0.5089, + "step": 9517 + }, + { + "epoch": 0.39275398200874806, + "grad_norm": 2.8011030407653377, + "learning_rate": 2.0786060877599305e-06, + "loss": 0.5631, + "step": 9518 + }, + { + "epoch": 0.392795246348106, + "grad_norm": 4.361740995030555, + "learning_rate": 2.0784211217023463e-06, + "loss": 0.5506, + "step": 9519 + }, + { + "epoch": 0.3928365106874639, + "grad_norm": 9.4644240675014, + "learning_rate": 2.0782361453127136e-06, + "loss": 0.5352, + "step": 9520 + }, + { + "epoch": 0.3928777750268218, + "grad_norm": 3.2569593370032988, + "learning_rate": 2.0780511585943373e-06, + "loss": 0.562, + "step": 9521 + }, + { + "epoch": 0.3929190393661797, + "grad_norm": 3.947807191874679, + "learning_rate": 2.07786616155052e-06, + "loss": 0.5622, + "step": 9522 + }, + { + "epoch": 0.3929603037055377, + "grad_norm": 3.9667296337786397, + "learning_rate": 2.077681154184569e-06, + "loss": 0.6038, + "step": 9523 + }, + { + "epoch": 0.3930015680448956, + "grad_norm": 2.845448794341835, + "learning_rate": 2.077496136499786e-06, + "loss": 0.5465, + "step": 9524 + }, + { + "epoch": 0.3930428323842535, + "grad_norm": 2.171000313337155, + "learning_rate": 2.0773111084994784e-06, + "loss": 0.5661, + "step": 9525 + }, + { + "epoch": 0.39308409672361144, + "grad_norm": 21.423478262783892, + "learning_rate": 2.07712607018695e-06, + "loss": 0.5395, + "step": 9526 + }, + { + "epoch": 0.39312536106296936, + "grad_norm": 2.249091592727261, + "learning_rate": 2.0769410215655063e-06, + "loss": 0.578, + "step": 9527 + }, + { + "epoch": 0.39316662540232733, + "grad_norm": 11.325546245517058, + "learning_rate": 2.076755962638452e-06, + 
"loss": 0.5339, + "step": 9528 + }, + { + "epoch": 0.39320788974168525, + "grad_norm": 2.152477714874574, + "learning_rate": 2.076570893409094e-06, + "loss": 0.5863, + "step": 9529 + }, + { + "epoch": 0.39324915408104316, + "grad_norm": 2.940632703683214, + "learning_rate": 2.076385813880737e-06, + "loss": 0.5268, + "step": 9530 + }, + { + "epoch": 0.3932904184204011, + "grad_norm": 2.197280835887328, + "learning_rate": 2.076200724056688e-06, + "loss": 0.558, + "step": 9531 + }, + { + "epoch": 0.393331682759759, + "grad_norm": 19.808577663663215, + "learning_rate": 2.076015623940252e-06, + "loss": 0.5827, + "step": 9532 + }, + { + "epoch": 0.39337294709911697, + "grad_norm": 2.82078553991048, + "learning_rate": 2.0758305135347367e-06, + "loss": 0.5534, + "step": 9533 + }, + { + "epoch": 0.3934142114384749, + "grad_norm": 2.399537691108118, + "learning_rate": 2.075645392843448e-06, + "loss": 0.5119, + "step": 9534 + }, + { + "epoch": 0.3934554757778328, + "grad_norm": 14.436092031744488, + "learning_rate": 2.075460261869692e-06, + "loss": 0.51, + "step": 9535 + }, + { + "epoch": 0.3934967401171907, + "grad_norm": 10.024318057829545, + "learning_rate": 2.075275120616776e-06, + "loss": 0.5567, + "step": 9536 + }, + { + "epoch": 0.39353800445654863, + "grad_norm": 1.8831950572459848, + "learning_rate": 2.0750899690880078e-06, + "loss": 0.5214, + "step": 9537 + }, + { + "epoch": 0.3935792687959066, + "grad_norm": 3.0888320253325983, + "learning_rate": 2.0749048072866935e-06, + "loss": 0.5257, + "step": 9538 + }, + { + "epoch": 0.3936205331352645, + "grad_norm": 2.699382344035271, + "learning_rate": 2.0747196352161416e-06, + "loss": 0.5372, + "step": 9539 + }, + { + "epoch": 0.39366179747462243, + "grad_norm": 3.149559439043944, + "learning_rate": 2.0745344528796592e-06, + "loss": 0.5402, + "step": 9540 + }, + { + "epoch": 0.39370306181398035, + "grad_norm": 9.040322115923294, + "learning_rate": 2.0743492602805544e-06, + "loss": 0.4779, + "step": 9541 + }, + { + "epoch": 0.39374432615333826, + "grad_norm": 2.959110033248147, + "learning_rate": 2.074164057422135e-06, + "loss": 0.5725, + "step": 9542 + }, + { + "epoch": 0.39378559049269624, + "grad_norm": 2.4198864569887415, + "learning_rate": 2.0739788443077087e-06, + "loss": 0.5833, + "step": 9543 + }, + { + "epoch": 0.39382685483205415, + "grad_norm": 8.382425689856513, + "learning_rate": 2.0737936209405844e-06, + "loss": 0.5008, + "step": 9544 + }, + { + "epoch": 0.39386811917141207, + "grad_norm": 7.392731418019564, + "learning_rate": 2.073608387324071e-06, + "loss": 0.4991, + "step": 9545 + }, + { + "epoch": 0.39390938351077, + "grad_norm": 2.1800261260697695, + "learning_rate": 2.0734231434614764e-06, + "loss": 0.5078, + "step": 9546 + }, + { + "epoch": 0.3939506478501279, + "grad_norm": 3.368872864408108, + "learning_rate": 2.0732378893561105e-06, + "loss": 0.5736, + "step": 9547 + }, + { + "epoch": 0.39399191218948587, + "grad_norm": 3.0028691410361477, + "learning_rate": 2.0730526250112817e-06, + "loss": 0.4826, + "step": 9548 + }, + { + "epoch": 0.3940331765288438, + "grad_norm": 2.22330293075222, + "learning_rate": 2.0728673504302995e-06, + "loss": 0.5557, + "step": 9549 + }, + { + "epoch": 0.3940744408682017, + "grad_norm": 8.10948974301356, + "learning_rate": 2.0726820656164733e-06, + "loss": 0.5649, + "step": 9550 + }, + { + "epoch": 0.3941157052075596, + "grad_norm": 3.4162445066363714, + "learning_rate": 2.0724967705731126e-06, + "loss": 0.5945, + "step": 9551 + }, + { + "epoch": 0.39415696954691753, + "grad_norm": 
2.4771822967738086, + "learning_rate": 2.0723114653035275e-06, + "loss": 0.5648, + "step": 9552 + }, + { + "epoch": 0.3941982338862755, + "grad_norm": 27.326482937250365, + "learning_rate": 2.072126149811028e-06, + "loss": 0.6043, + "step": 9553 + }, + { + "epoch": 0.3942394982256334, + "grad_norm": 2.489904728159963, + "learning_rate": 2.0719408240989244e-06, + "loss": 0.5609, + "step": 9554 + }, + { + "epoch": 0.39428076256499134, + "grad_norm": 3.099346789416552, + "learning_rate": 2.0717554881705265e-06, + "loss": 0.537, + "step": 9555 + }, + { + "epoch": 0.39432202690434925, + "grad_norm": 3.8314308816328495, + "learning_rate": 2.0715701420291457e-06, + "loss": 0.552, + "step": 9556 + }, + { + "epoch": 0.39436329124370717, + "grad_norm": 4.012402927814193, + "learning_rate": 2.0713847856780918e-06, + "loss": 0.5468, + "step": 9557 + }, + { + "epoch": 0.39440455558306514, + "grad_norm": 5.258045328886543, + "learning_rate": 2.0711994191206767e-06, + "loss": 0.5085, + "step": 9558 + }, + { + "epoch": 0.39444581992242306, + "grad_norm": 2.324045300023873, + "learning_rate": 2.0710140423602106e-06, + "loss": 0.5334, + "step": 9559 + }, + { + "epoch": 0.394487084261781, + "grad_norm": 4.54458280812376, + "learning_rate": 2.070828655400006e-06, + "loss": 0.5017, + "step": 9560 + }, + { + "epoch": 0.3945283486011389, + "grad_norm": 3.635177275561351, + "learning_rate": 2.070643258243373e-06, + "loss": 0.5618, + "step": 9561 + }, + { + "epoch": 0.3945696129404968, + "grad_norm": 49.88312123251934, + "learning_rate": 2.070457850893624e-06, + "loss": 0.5437, + "step": 9562 + }, + { + "epoch": 0.3946108772798548, + "grad_norm": 4.2985449458859595, + "learning_rate": 2.0702724333540705e-06, + "loss": 0.4979, + "step": 9563 + }, + { + "epoch": 0.3946521416192127, + "grad_norm": 6.324812964277565, + "learning_rate": 2.0700870056280246e-06, + "loss": 0.4978, + "step": 9564 + }, + { + "epoch": 0.3946934059585706, + "grad_norm": 7.259652101244316, + "learning_rate": 2.0699015677187995e-06, + "loss": 0.5531, + "step": 9565 + }, + { + "epoch": 0.3947346702979285, + "grad_norm": 3.715058812145514, + "learning_rate": 2.0697161196297063e-06, + "loss": 0.5249, + "step": 9566 + }, + { + "epoch": 0.39477593463728644, + "grad_norm": 5.969744519838062, + "learning_rate": 2.0695306613640577e-06, + "loss": 0.5493, + "step": 9567 + }, + { + "epoch": 0.39481719897664436, + "grad_norm": 1.9710878434243804, + "learning_rate": 2.069345192925167e-06, + "loss": 0.5511, + "step": 9568 + }, + { + "epoch": 0.39485846331600233, + "grad_norm": 2.0907706307553173, + "learning_rate": 2.0691597143163467e-06, + "loss": 0.5474, + "step": 9569 + }, + { + "epoch": 0.39489972765536024, + "grad_norm": 2.2844968349807258, + "learning_rate": 2.06897422554091e-06, + "loss": 0.5398, + "step": 9570 + }, + { + "epoch": 0.39494099199471816, + "grad_norm": 5.734220506950039, + "learning_rate": 2.068788726602171e-06, + "loss": 0.5043, + "step": 9571 + }, + { + "epoch": 0.3949822563340761, + "grad_norm": 4.371054208415773, + "learning_rate": 2.0686032175034418e-06, + "loss": 0.5621, + "step": 9572 + }, + { + "epoch": 0.395023520673434, + "grad_norm": 2.220772305147235, + "learning_rate": 2.0684176982480367e-06, + "loss": 0.5329, + "step": 9573 + }, + { + "epoch": 0.39506478501279196, + "grad_norm": 4.072897827872875, + "learning_rate": 2.06823216883927e-06, + "loss": 0.5718, + "step": 9574 + }, + { + "epoch": 0.3951060493521499, + "grad_norm": 3.692841268784339, + "learning_rate": 2.068046629280455e-06, + "loss": 0.52, + "step": 9575 + }, + { 
+ "epoch": 0.3951473136915078, + "grad_norm": 2.034925098943741, + "learning_rate": 2.067861079574906e-06, + "loss": 0.5567, + "step": 9576 + }, + { + "epoch": 0.3951885780308657, + "grad_norm": 10.843230956578594, + "learning_rate": 2.0676755197259377e-06, + "loss": 0.5045, + "step": 9577 + }, + { + "epoch": 0.3952298423702236, + "grad_norm": 21.890664524744164, + "learning_rate": 2.0674899497368644e-06, + "loss": 0.5181, + "step": 9578 + }, + { + "epoch": 0.3952711067095816, + "grad_norm": 2.062031796157821, + "learning_rate": 2.067304369611001e-06, + "loss": 0.4971, + "step": 9579 + }, + { + "epoch": 0.3953123710489395, + "grad_norm": 2.019527229157427, + "learning_rate": 2.067118779351663e-06, + "loss": 0.46, + "step": 9580 + }, + { + "epoch": 0.39535363538829743, + "grad_norm": 3.3541671803879214, + "learning_rate": 2.066933178962164e-06, + "loss": 0.4797, + "step": 9581 + }, + { + "epoch": 0.39539489972765535, + "grad_norm": 3.2942993659488407, + "learning_rate": 2.0667475684458208e-06, + "loss": 0.4915, + "step": 9582 + }, + { + "epoch": 0.39543616406701326, + "grad_norm": 9.584456823114174, + "learning_rate": 2.066561947805947e-06, + "loss": 0.5468, + "step": 9583 + }, + { + "epoch": 0.39547742840637123, + "grad_norm": 2.382040713138675, + "learning_rate": 2.0663763170458607e-06, + "loss": 0.4801, + "step": 9584 + }, + { + "epoch": 0.39551869274572915, + "grad_norm": 2.8188904003464166, + "learning_rate": 2.066190676168876e-06, + "loss": 0.5314, + "step": 9585 + }, + { + "epoch": 0.39555995708508707, + "grad_norm": 3.826041727660922, + "learning_rate": 2.06600502517831e-06, + "loss": 0.5507, + "step": 9586 + }, + { + "epoch": 0.395601221424445, + "grad_norm": 5.920486916703979, + "learning_rate": 2.0658193640774782e-06, + "loss": 0.5693, + "step": 9587 + }, + { + "epoch": 0.3956424857638029, + "grad_norm": 3.288668982242925, + "learning_rate": 2.065633692869697e-06, + "loss": 0.5331, + "step": 9588 + }, + { + "epoch": 0.39568375010316087, + "grad_norm": 4.826795195931132, + "learning_rate": 2.0654480115582827e-06, + "loss": 0.5116, + "step": 9589 + }, + { + "epoch": 0.3957250144425188, + "grad_norm": 3.3647345227999863, + "learning_rate": 2.065262320146553e-06, + "loss": 0.5149, + "step": 9590 + }, + { + "epoch": 0.3957662787818767, + "grad_norm": 5.866856370216083, + "learning_rate": 2.0650766186378235e-06, + "loss": 0.498, + "step": 9591 + }, + { + "epoch": 0.3958075431212346, + "grad_norm": 13.840668729278507, + "learning_rate": 2.064890907035412e-06, + "loss": 0.5432, + "step": 9592 + }, + { + "epoch": 0.39584880746059253, + "grad_norm": 5.832732904212803, + "learning_rate": 2.0647051853426362e-06, + "loss": 0.5639, + "step": 9593 + }, + { + "epoch": 0.3958900717999505, + "grad_norm": 4.568433738199349, + "learning_rate": 2.0645194535628128e-06, + "loss": 0.4929, + "step": 9594 + }, + { + "epoch": 0.3959313361393084, + "grad_norm": 2.842870311590725, + "learning_rate": 2.06433371169926e-06, + "loss": 0.5944, + "step": 9595 + }, + { + "epoch": 0.39597260047866634, + "grad_norm": 2.48120828902607, + "learning_rate": 2.064147959755295e-06, + "loss": 0.5527, + "step": 9596 + }, + { + "epoch": 0.39601386481802425, + "grad_norm": 4.995093600308828, + "learning_rate": 2.063962197734237e-06, + "loss": 0.5234, + "step": 9597 + }, + { + "epoch": 0.39605512915738217, + "grad_norm": 4.3865519986640145, + "learning_rate": 2.0637764256394027e-06, + "loss": 0.5363, + "step": 9598 + }, + { + "epoch": 0.39609639349674014, + "grad_norm": 4.13039784530667, + "learning_rate": 
2.063590643474111e-06, + "loss": 0.5703, + "step": 9599 + }, + { + "epoch": 0.39613765783609806, + "grad_norm": 2.243511876567513, + "learning_rate": 2.0634048512416805e-06, + "loss": 0.4978, + "step": 9600 + }, + { + "epoch": 0.39617892217545597, + "grad_norm": 3.4259490286235192, + "learning_rate": 2.0632190489454303e-06, + "loss": 0.4799, + "step": 9601 + }, + { + "epoch": 0.3962201865148139, + "grad_norm": 4.995475380960612, + "learning_rate": 2.0630332365886784e-06, + "loss": 0.4945, + "step": 9602 + }, + { + "epoch": 0.3962614508541718, + "grad_norm": 5.485481213621449, + "learning_rate": 2.0628474141747452e-06, + "loss": 0.5492, + "step": 9603 + }, + { + "epoch": 0.3963027151935298, + "grad_norm": 4.218349237509276, + "learning_rate": 2.062661581706948e-06, + "loss": 0.5178, + "step": 9604 + }, + { + "epoch": 0.3963439795328877, + "grad_norm": 3.5852128938127974, + "learning_rate": 2.0624757391886088e-06, + "loss": 0.5406, + "step": 9605 + }, + { + "epoch": 0.3963852438722456, + "grad_norm": 10.98308987951899, + "learning_rate": 2.062289886623045e-06, + "loss": 0.5407, + "step": 9606 + }, + { + "epoch": 0.3964265082116035, + "grad_norm": 2.426919083468454, + "learning_rate": 2.062104024013577e-06, + "loss": 0.4968, + "step": 9607 + }, + { + "epoch": 0.39646777255096144, + "grad_norm": 2.0693329903355697, + "learning_rate": 2.0619181513635254e-06, + "loss": 0.5183, + "step": 9608 + }, + { + "epoch": 0.3965090368903194, + "grad_norm": 5.021210293593824, + "learning_rate": 2.06173226867621e-06, + "loss": 0.5424, + "step": 9609 + }, + { + "epoch": 0.3965503012296773, + "grad_norm": 5.459636030629896, + "learning_rate": 2.0615463759549503e-06, + "loss": 0.513, + "step": 9610 + }, + { + "epoch": 0.39659156556903524, + "grad_norm": 3.2969136906981977, + "learning_rate": 2.061360473203068e-06, + "loss": 0.5439, + "step": 9611 + }, + { + "epoch": 0.39663282990839316, + "grad_norm": 2.8536451945076915, + "learning_rate": 2.0611745604238836e-06, + "loss": 0.5139, + "step": 9612 + }, + { + "epoch": 0.3966740942477511, + "grad_norm": 2.7675333863387923, + "learning_rate": 2.0609886376207176e-06, + "loss": 0.4668, + "step": 9613 + }, + { + "epoch": 0.39671535858710905, + "grad_norm": 3.025664626420646, + "learning_rate": 2.0608027047968906e-06, + "loss": 0.493, + "step": 9614 + }, + { + "epoch": 0.39675662292646696, + "grad_norm": 2.643891017996073, + "learning_rate": 2.060616761955725e-06, + "loss": 0.5278, + "step": 9615 + }, + { + "epoch": 0.3967978872658249, + "grad_norm": 2.871416773575693, + "learning_rate": 2.060430809100541e-06, + "loss": 0.4534, + "step": 9616 + }, + { + "epoch": 0.3968391516051828, + "grad_norm": 3.800957562661546, + "learning_rate": 2.0602448462346616e-06, + "loss": 0.5024, + "step": 9617 + }, + { + "epoch": 0.3968804159445407, + "grad_norm": 4.428471179698986, + "learning_rate": 2.060058873361407e-06, + "loss": 0.5304, + "step": 9618 + }, + { + "epoch": 0.3969216802838987, + "grad_norm": 2.668154518215769, + "learning_rate": 2.059872890484101e-06, + "loss": 0.5003, + "step": 9619 + }, + { + "epoch": 0.3969629446232566, + "grad_norm": 15.195264983980964, + "learning_rate": 2.0596868976060633e-06, + "loss": 0.5053, + "step": 9620 + }, + { + "epoch": 0.3970042089626145, + "grad_norm": 4.022715425007799, + "learning_rate": 2.059500894730618e-06, + "loss": 0.5324, + "step": 9621 + }, + { + "epoch": 0.39704547330197243, + "grad_norm": 3.787390018585767, + "learning_rate": 2.059314881861087e-06, + "loss": 0.5247, + "step": 9622 + }, + { + "epoch": 0.39708673764133035, + 
"grad_norm": 2.346305935878744, + "learning_rate": 2.059128859000793e-06, + "loss": 0.553, + "step": 9623 + }, + { + "epoch": 0.3971280019806883, + "grad_norm": 3.8664538044672585, + "learning_rate": 2.0589428261530584e-06, + "loss": 0.5809, + "step": 9624 + }, + { + "epoch": 0.39716926632004623, + "grad_norm": 14.749763739093135, + "learning_rate": 2.0587567833212077e-06, + "loss": 0.5562, + "step": 9625 + }, + { + "epoch": 0.39721053065940415, + "grad_norm": 2.218504698866572, + "learning_rate": 2.0585707305085624e-06, + "loss": 0.5317, + "step": 9626 + }, + { + "epoch": 0.39725179499876206, + "grad_norm": 2.3205432684022043, + "learning_rate": 2.0583846677184466e-06, + "loss": 0.5441, + "step": 9627 + }, + { + "epoch": 0.39729305933812, + "grad_norm": 2.1648269087924943, + "learning_rate": 2.0581985949541837e-06, + "loss": 0.4823, + "step": 9628 + }, + { + "epoch": 0.3973343236774779, + "grad_norm": 2.4769638333395183, + "learning_rate": 2.058012512219098e-06, + "loss": 0.508, + "step": 9629 + }, + { + "epoch": 0.39737558801683587, + "grad_norm": 2.800075830581357, + "learning_rate": 2.0578264195165116e-06, + "loss": 0.5942, + "step": 9630 + }, + { + "epoch": 0.3974168523561938, + "grad_norm": 4.683478250284358, + "learning_rate": 2.057640316849751e-06, + "loss": 0.5783, + "step": 9631 + }, + { + "epoch": 0.3974581166955517, + "grad_norm": 3.031354846112903, + "learning_rate": 2.0574542042221396e-06, + "loss": 0.5439, + "step": 9632 + }, + { + "epoch": 0.3974993810349096, + "grad_norm": 3.341215029285881, + "learning_rate": 2.0572680816370005e-06, + "loss": 0.5218, + "step": 9633 + }, + { + "epoch": 0.39754064537426753, + "grad_norm": 5.753263991512005, + "learning_rate": 2.05708194909766e-06, + "loss": 0.5676, + "step": 9634 + }, + { + "epoch": 0.3975819097136255, + "grad_norm": 1.985379607915487, + "learning_rate": 2.056895806607442e-06, + "loss": 0.5603, + "step": 9635 + }, + { + "epoch": 0.3976231740529834, + "grad_norm": 4.115416424820169, + "learning_rate": 2.0567096541696724e-06, + "loss": 0.5115, + "step": 9636 + }, + { + "epoch": 0.39766443839234134, + "grad_norm": 3.8682386349465876, + "learning_rate": 2.0565234917876753e-06, + "loss": 0.5355, + "step": 9637 + }, + { + "epoch": 0.39770570273169925, + "grad_norm": 4.234030815161392, + "learning_rate": 2.0563373194647764e-06, + "loss": 0.534, + "step": 9638 + }, + { + "epoch": 0.39774696707105717, + "grad_norm": 2.338183230308335, + "learning_rate": 2.056151137204301e-06, + "loss": 0.4984, + "step": 9639 + }, + { + "epoch": 0.39778823141041514, + "grad_norm": 6.020785962128183, + "learning_rate": 2.055964945009576e-06, + "loss": 0.5209, + "step": 9640 + }, + { + "epoch": 0.39782949574977305, + "grad_norm": 3.0367086989824887, + "learning_rate": 2.055778742883925e-06, + "loss": 0.5885, + "step": 9641 + }, + { + "epoch": 0.39787076008913097, + "grad_norm": 2.553884871198012, + "learning_rate": 2.055592530830676e-06, + "loss": 0.4798, + "step": 9642 + }, + { + "epoch": 0.3979120244284889, + "grad_norm": 3.1615963763745674, + "learning_rate": 2.055406308853154e-06, + "loss": 0.5021, + "step": 9643 + }, + { + "epoch": 0.3979532887678468, + "grad_norm": 6.903816750786317, + "learning_rate": 2.0552200769546864e-06, + "loss": 0.5559, + "step": 9644 + }, + { + "epoch": 0.3979945531072048, + "grad_norm": 4.434693406363825, + "learning_rate": 2.0550338351385994e-06, + "loss": 0.54, + "step": 9645 + }, + { + "epoch": 0.3980358174465627, + "grad_norm": 4.5688767701964315, + "learning_rate": 2.0548475834082196e-06, + "loss": 0.503, + "step": 
9646 + }, + { + "epoch": 0.3980770817859206, + "grad_norm": 4.583489899975135, + "learning_rate": 2.0546613217668733e-06, + "loss": 0.5077, + "step": 9647 + }, + { + "epoch": 0.3981183461252785, + "grad_norm": 5.418514647007547, + "learning_rate": 2.054475050217889e-06, + "loss": 0.5651, + "step": 9648 + }, + { + "epoch": 0.39815961046463644, + "grad_norm": 2.2797930909282025, + "learning_rate": 2.054288768764593e-06, + "loss": 0.5364, + "step": 9649 + }, + { + "epoch": 0.3982008748039944, + "grad_norm": 4.827249983013821, + "learning_rate": 2.0541024774103124e-06, + "loss": 0.5211, + "step": 9650 + }, + { + "epoch": 0.3982421391433523, + "grad_norm": 6.551836751756891, + "learning_rate": 2.053916176158376e-06, + "loss": 0.5149, + "step": 9651 + }, + { + "epoch": 0.39828340348271024, + "grad_norm": 3.157076971812139, + "learning_rate": 2.053729865012111e-06, + "loss": 0.5381, + "step": 9652 + }, + { + "epoch": 0.39832466782206816, + "grad_norm": 2.4738147243156425, + "learning_rate": 2.0535435439748447e-06, + "loss": 0.5536, + "step": 9653 + }, + { + "epoch": 0.3983659321614261, + "grad_norm": 3.452010378305634, + "learning_rate": 2.053357213049907e-06, + "loss": 0.4559, + "step": 9654 + }, + { + "epoch": 0.39840719650078404, + "grad_norm": 2.9338500691400204, + "learning_rate": 2.0531708722406246e-06, + "loss": 0.5529, + "step": 9655 + }, + { + "epoch": 0.39844846084014196, + "grad_norm": 6.043396121119698, + "learning_rate": 2.0529845215503268e-06, + "loss": 0.5317, + "step": 9656 + }, + { + "epoch": 0.3984897251794999, + "grad_norm": 4.146592583372296, + "learning_rate": 2.0527981609823415e-06, + "loss": 0.5377, + "step": 9657 + }, + { + "epoch": 0.3985309895188578, + "grad_norm": 4.4562320290709145, + "learning_rate": 2.052611790539999e-06, + "loss": 0.4991, + "step": 9658 + }, + { + "epoch": 0.3985722538582157, + "grad_norm": 3.0951769448138022, + "learning_rate": 2.052425410226627e-06, + "loss": 0.5181, + "step": 9659 + }, + { + "epoch": 0.3986135181975737, + "grad_norm": 1.8493106920720175, + "learning_rate": 2.0522390200455557e-06, + "loss": 0.5408, + "step": 9660 + }, + { + "epoch": 0.3986547825369316, + "grad_norm": 2.84922337057054, + "learning_rate": 2.0520526200001134e-06, + "loss": 0.5198, + "step": 9661 + }, + { + "epoch": 0.3986960468762895, + "grad_norm": 3.4065440913723966, + "learning_rate": 2.0518662100936304e-06, + "loss": 0.5389, + "step": 9662 + }, + { + "epoch": 0.3987373112156474, + "grad_norm": 3.2731695540441628, + "learning_rate": 2.0516797903294365e-06, + "loss": 0.515, + "step": 9663 + }, + { + "epoch": 0.39877857555500534, + "grad_norm": 3.6124217733928154, + "learning_rate": 2.0514933607108616e-06, + "loss": 0.5295, + "step": 9664 + }, + { + "epoch": 0.3988198398943633, + "grad_norm": 6.392581813436827, + "learning_rate": 2.0513069212412356e-06, + "loss": 0.5631, + "step": 9665 + }, + { + "epoch": 0.39886110423372123, + "grad_norm": 2.0231578837200908, + "learning_rate": 2.051120471923889e-06, + "loss": 0.5279, + "step": 9666 + }, + { + "epoch": 0.39890236857307915, + "grad_norm": 2.6806958008353736, + "learning_rate": 2.0509340127621516e-06, + "loss": 0.5215, + "step": 9667 + }, + { + "epoch": 0.39894363291243706, + "grad_norm": 2.2378166351781235, + "learning_rate": 2.0507475437593548e-06, + "loss": 0.5289, + "step": 9668 + }, + { + "epoch": 0.398984897251795, + "grad_norm": 6.25799839811745, + "learning_rate": 2.0505610649188284e-06, + "loss": 0.519, + "step": 9669 + }, + { + "epoch": 0.39902616159115295, + "grad_norm": 3.121557409999471, + 
"learning_rate": 2.050374576243905e-06, + "loss": 0.5238, + "step": 9670 + }, + { + "epoch": 0.39906742593051087, + "grad_norm": 2.3693066998945214, + "learning_rate": 2.0501880777379145e-06, + "loss": 0.5314, + "step": 9671 + }, + { + "epoch": 0.3991086902698688, + "grad_norm": 4.939994055425459, + "learning_rate": 2.050001569404189e-06, + "loss": 0.5483, + "step": 9672 + }, + { + "epoch": 0.3991499546092267, + "grad_norm": 2.9020924171435603, + "learning_rate": 2.0498150512460587e-06, + "loss": 0.5064, + "step": 9673 + }, + { + "epoch": 0.3991912189485846, + "grad_norm": 2.9864663777732563, + "learning_rate": 2.0496285232668565e-06, + "loss": 0.4994, + "step": 9674 + }, + { + "epoch": 0.3992324832879426, + "grad_norm": 3.7189288791114516, + "learning_rate": 2.0494419854699144e-06, + "loss": 0.4998, + "step": 9675 + }, + { + "epoch": 0.3992737476273005, + "grad_norm": 4.6035056543208945, + "learning_rate": 2.0492554378585636e-06, + "loss": 0.4947, + "step": 9676 + }, + { + "epoch": 0.3993150119666584, + "grad_norm": 5.034527371641065, + "learning_rate": 2.049068880436136e-06, + "loss": 0.5221, + "step": 9677 + }, + { + "epoch": 0.39935627630601633, + "grad_norm": 6.756511732549395, + "learning_rate": 2.048882313205966e-06, + "loss": 0.5091, + "step": 9678 + }, + { + "epoch": 0.39939754064537425, + "grad_norm": 2.598774334787324, + "learning_rate": 2.048695736171384e-06, + "loss": 0.5188, + "step": 9679 + }, + { + "epoch": 0.3994388049847322, + "grad_norm": 10.071254554890551, + "learning_rate": 2.048509149335723e-06, + "loss": 0.5137, + "step": 9680 + }, + { + "epoch": 0.39948006932409014, + "grad_norm": 1.719721461833967, + "learning_rate": 2.048322552702318e-06, + "loss": 0.55, + "step": 9681 + }, + { + "epoch": 0.39952133366344805, + "grad_norm": 2.03211973093624, + "learning_rate": 2.048135946274499e-06, + "loss": 0.5009, + "step": 9682 + }, + { + "epoch": 0.39956259800280597, + "grad_norm": 3.5867779740933843, + "learning_rate": 2.0479493300556013e-06, + "loss": 0.6044, + "step": 9683 + }, + { + "epoch": 0.3996038623421639, + "grad_norm": 8.667760020731658, + "learning_rate": 2.047762704048958e-06, + "loss": 0.5624, + "step": 9684 + }, + { + "epoch": 0.39964512668152186, + "grad_norm": 2.2248422963647685, + "learning_rate": 2.047576068257902e-06, + "loss": 0.4942, + "step": 9685 + }, + { + "epoch": 0.39968639102087977, + "grad_norm": 4.161815632769843, + "learning_rate": 2.047389422685768e-06, + "loss": 0.5048, + "step": 9686 + }, + { + "epoch": 0.3997276553602377, + "grad_norm": 3.677502740330938, + "learning_rate": 2.0472027673358897e-06, + "loss": 0.5073, + "step": 9687 + }, + { + "epoch": 0.3997689196995956, + "grad_norm": 8.310849610799753, + "learning_rate": 2.047016102211601e-06, + "loss": 0.5697, + "step": 9688 + }, + { + "epoch": 0.3998101840389535, + "grad_norm": 4.346635609027994, + "learning_rate": 2.046829427316236e-06, + "loss": 0.5416, + "step": 9689 + }, + { + "epoch": 0.39985144837831144, + "grad_norm": 2.550549832082867, + "learning_rate": 2.0466427426531296e-06, + "loss": 0.5684, + "step": 9690 + }, + { + "epoch": 0.3998927127176694, + "grad_norm": 3.343398643473149, + "learning_rate": 2.046456048225617e-06, + "loss": 0.5192, + "step": 9691 + }, + { + "epoch": 0.3999339770570273, + "grad_norm": 9.209454810707829, + "learning_rate": 2.0462693440370314e-06, + "loss": 0.5467, + "step": 9692 + }, + { + "epoch": 0.39997524139638524, + "grad_norm": 2.6049978605343873, + "learning_rate": 2.046082630090709e-06, + "loss": 0.506, + "step": 9693 + }, + { + "epoch": 
0.40001650573574316, + "grad_norm": 4.974348259511786, + "learning_rate": 2.045895906389985e-06, + "loss": 0.5721, + "step": 9694 + }, + { + "epoch": 0.40005777007510107, + "grad_norm": 2.281962283748475, + "learning_rate": 2.0457091729381944e-06, + "loss": 0.5438, + "step": 9695 + }, + { + "epoch": 0.40009903441445904, + "grad_norm": 4.619528425472188, + "learning_rate": 2.045522429738672e-06, + "loss": 0.5054, + "step": 9696 + }, + { + "epoch": 0.40014029875381696, + "grad_norm": 8.062649750400771, + "learning_rate": 2.0453356767947553e-06, + "loss": 0.5354, + "step": 9697 + }, + { + "epoch": 0.4001815630931749, + "grad_norm": 2.725196229710375, + "learning_rate": 2.045148914109779e-06, + "loss": 0.5444, + "step": 9698 + }, + { + "epoch": 0.4002228274325328, + "grad_norm": 3.743635944504283, + "learning_rate": 2.0449621416870794e-06, + "loss": 0.5002, + "step": 9699 + }, + { + "epoch": 0.4002640917718907, + "grad_norm": 3.3046001380959824, + "learning_rate": 2.0447753595299927e-06, + "loss": 0.5701, + "step": 9700 + }, + { + "epoch": 0.4003053561112487, + "grad_norm": 2.2772822670764565, + "learning_rate": 2.0445885676418553e-06, + "loss": 0.5158, + "step": 9701 + }, + { + "epoch": 0.4003466204506066, + "grad_norm": 2.319213668919246, + "learning_rate": 2.044401766026003e-06, + "loss": 0.5092, + "step": 9702 + }, + { + "epoch": 0.4003878847899645, + "grad_norm": 3.2592905238327985, + "learning_rate": 2.0442149546857736e-06, + "loss": 0.5429, + "step": 9703 + }, + { + "epoch": 0.4004291491293224, + "grad_norm": 3.331729644430761, + "learning_rate": 2.0440281336245044e-06, + "loss": 0.5814, + "step": 9704 + }, + { + "epoch": 0.40047041346868034, + "grad_norm": 6.768076163033936, + "learning_rate": 2.043841302845531e-06, + "loss": 0.5604, + "step": 9705 + }, + { + "epoch": 0.4005116778080383, + "grad_norm": 7.1687999487984975, + "learning_rate": 2.043654462352192e-06, + "loss": 0.5382, + "step": 9706 + }, + { + "epoch": 0.40055294214739623, + "grad_norm": 3.1340963312692476, + "learning_rate": 2.043467612147824e-06, + "loss": 0.5336, + "step": 9707 + }, + { + "epoch": 0.40059420648675415, + "grad_norm": 2.367668640045082, + "learning_rate": 2.043280752235765e-06, + "loss": 0.4832, + "step": 9708 + }, + { + "epoch": 0.40063547082611206, + "grad_norm": 2.389913106901115, + "learning_rate": 2.0430938826193526e-06, + "loss": 0.5689, + "step": 9709 + }, + { + "epoch": 0.40067673516547, + "grad_norm": 5.045882379527627, + "learning_rate": 2.0429070033019245e-06, + "loss": 0.5231, + "step": 9710 + }, + { + "epoch": 0.40071799950482795, + "grad_norm": 3.5044431194983847, + "learning_rate": 2.0427201142868196e-06, + "loss": 0.5118, + "step": 9711 + }, + { + "epoch": 0.40075926384418586, + "grad_norm": 4.10005998992017, + "learning_rate": 2.042533215577376e-06, + "loss": 0.6429, + "step": 9712 + }, + { + "epoch": 0.4008005281835438, + "grad_norm": 3.711098004045231, + "learning_rate": 2.042346307176931e-06, + "loss": 0.5619, + "step": 9713 + }, + { + "epoch": 0.4008417925229017, + "grad_norm": 2.324751505984981, + "learning_rate": 2.042159389088825e-06, + "loss": 0.4444, + "step": 9714 + }, + { + "epoch": 0.4008830568622596, + "grad_norm": 4.965110364002371, + "learning_rate": 2.041972461316396e-06, + "loss": 0.5691, + "step": 9715 + }, + { + "epoch": 0.4009243212016176, + "grad_norm": 5.122297701427689, + "learning_rate": 2.0417855238629828e-06, + "loss": 0.552, + "step": 9716 + }, + { + "epoch": 0.4009655855409755, + "grad_norm": 17.37957726196347, + "learning_rate": 2.0415985767319254e-06, + 
"loss": 0.5614, + "step": 9717 + }, + { + "epoch": 0.4010068498803334, + "grad_norm": 3.3002212333529473, + "learning_rate": 2.041411619926562e-06, + "loss": 0.5206, + "step": 9718 + }, + { + "epoch": 0.40104811421969133, + "grad_norm": 16.51953149888159, + "learning_rate": 2.0412246534502327e-06, + "loss": 0.5008, + "step": 9719 + }, + { + "epoch": 0.40108937855904925, + "grad_norm": 4.531581996377769, + "learning_rate": 2.041037677306277e-06, + "loss": 0.5351, + "step": 9720 + }, + { + "epoch": 0.4011306428984072, + "grad_norm": 3.792273630675477, + "learning_rate": 2.040850691498035e-06, + "loss": 0.495, + "step": 9721 + }, + { + "epoch": 0.40117190723776514, + "grad_norm": 3.221716626981324, + "learning_rate": 2.0406636960288472e-06, + "loss": 0.5439, + "step": 9722 + }, + { + "epoch": 0.40121317157712305, + "grad_norm": 2.5378809081240084, + "learning_rate": 2.0404766909020525e-06, + "loss": 0.5368, + "step": 9723 + }, + { + "epoch": 0.40125443591648097, + "grad_norm": 5.557963107680342, + "learning_rate": 2.0402896761209927e-06, + "loss": 0.5253, + "step": 9724 + }, + { + "epoch": 0.4012957002558389, + "grad_norm": 2.9444976973299855, + "learning_rate": 2.0401026516890072e-06, + "loss": 0.5394, + "step": 9725 + }, + { + "epoch": 0.40133696459519685, + "grad_norm": 3.695241512045341, + "learning_rate": 2.0399156176094376e-06, + "loss": 0.502, + "step": 9726 + }, + { + "epoch": 0.40137822893455477, + "grad_norm": 3.4756107220343098, + "learning_rate": 2.0397285738856237e-06, + "loss": 0.5834, + "step": 9727 + }, + { + "epoch": 0.4014194932739127, + "grad_norm": 3.167165247130034, + "learning_rate": 2.039541520520908e-06, + "loss": 0.5893, + "step": 9728 + }, + { + "epoch": 0.4014607576132706, + "grad_norm": 2.8830045232391917, + "learning_rate": 2.0393544575186304e-06, + "loss": 0.5351, + "step": 9729 + }, + { + "epoch": 0.4015020219526285, + "grad_norm": 3.871863162128049, + "learning_rate": 2.0391673848821333e-06, + "loss": 0.566, + "step": 9730 + }, + { + "epoch": 0.4015432862919865, + "grad_norm": 3.529922090912116, + "learning_rate": 2.0389803026147585e-06, + "loss": 0.5534, + "step": 9731 + }, + { + "epoch": 0.4015845506313444, + "grad_norm": 2.4402267003287688, + "learning_rate": 2.0387932107198467e-06, + "loss": 0.5509, + "step": 9732 + }, + { + "epoch": 0.4016258149707023, + "grad_norm": 2.3809991018607954, + "learning_rate": 2.0386061092007395e-06, + "loss": 0.5102, + "step": 9733 + }, + { + "epoch": 0.40166707931006024, + "grad_norm": 2.136166043745782, + "learning_rate": 2.038418998060781e-06, + "loss": 0.5709, + "step": 9734 + }, + { + "epoch": 0.40170834364941815, + "grad_norm": 4.249746053501179, + "learning_rate": 2.038231877303311e-06, + "loss": 0.6006, + "step": 9735 + }, + { + "epoch": 0.4017496079887761, + "grad_norm": 9.624250149025672, + "learning_rate": 2.0380447469316745e-06, + "loss": 0.5046, + "step": 9736 + }, + { + "epoch": 0.40179087232813404, + "grad_norm": 13.54529369682634, + "learning_rate": 2.0378576069492124e-06, + "loss": 0.5312, + "step": 9737 + }, + { + "epoch": 0.40183213666749196, + "grad_norm": 4.215428715916819, + "learning_rate": 2.037670457359268e-06, + "loss": 0.5176, + "step": 9738 + }, + { + "epoch": 0.4018734010068499, + "grad_norm": 2.113761651467645, + "learning_rate": 2.0374832981651834e-06, + "loss": 0.5366, + "step": 9739 + }, + { + "epoch": 0.4019146653462078, + "grad_norm": 3.3895927764289366, + "learning_rate": 2.0372961293703035e-06, + "loss": 0.5608, + "step": 9740 + }, + { + "epoch": 0.40195592968556576, + "grad_norm": 
3.9548305874258594, + "learning_rate": 2.0371089509779703e-06, + "loss": 0.5001, + "step": 9741 + }, + { + "epoch": 0.4019971940249237, + "grad_norm": 2.411950233627514, + "learning_rate": 2.0369217629915278e-06, + "loss": 0.565, + "step": 9742 + }, + { + "epoch": 0.4020384583642816, + "grad_norm": 5.573825396315026, + "learning_rate": 2.036734565414319e-06, + "loss": 0.5043, + "step": 9743 + }, + { + "epoch": 0.4020797227036395, + "grad_norm": 6.6677297160111895, + "learning_rate": 2.0365473582496887e-06, + "loss": 0.4875, + "step": 9744 + }, + { + "epoch": 0.4021209870429974, + "grad_norm": 2.6621195740546137, + "learning_rate": 2.0363601415009796e-06, + "loss": 0.5523, + "step": 9745 + }, + { + "epoch": 0.4021622513823554, + "grad_norm": 2.0915554821142677, + "learning_rate": 2.036172915171537e-06, + "loss": 0.5514, + "step": 9746 + }, + { + "epoch": 0.4022035157217133, + "grad_norm": 2.5949327383147853, + "learning_rate": 2.035985679264705e-06, + "loss": 0.5341, + "step": 9747 + }, + { + "epoch": 0.4022447800610712, + "grad_norm": 6.452234354312714, + "learning_rate": 2.0357984337838275e-06, + "loss": 0.5743, + "step": 9748 + }, + { + "epoch": 0.40228604440042914, + "grad_norm": 3.048799177251574, + "learning_rate": 2.03561117873225e-06, + "loss": 0.5379, + "step": 9749 + }, + { + "epoch": 0.40232730873978706, + "grad_norm": 3.6442138258114776, + "learning_rate": 2.035423914113317e-06, + "loss": 0.5404, + "step": 9750 + }, + { + "epoch": 0.402368573079145, + "grad_norm": 3.3418551892192143, + "learning_rate": 2.0352366399303734e-06, + "loss": 0.4524, + "step": 9751 + }, + { + "epoch": 0.40240983741850295, + "grad_norm": 3.0030578451630467, + "learning_rate": 2.035049356186764e-06, + "loss": 0.5723, + "step": 9752 + }, + { + "epoch": 0.40245110175786086, + "grad_norm": 5.081385438226402, + "learning_rate": 2.034862062885835e-06, + "loss": 0.5586, + "step": 9753 + }, + { + "epoch": 0.4024923660972188, + "grad_norm": 4.758535418116538, + "learning_rate": 2.0346747600309316e-06, + "loss": 0.514, + "step": 9754 + }, + { + "epoch": 0.4025336304365767, + "grad_norm": 5.456232587199318, + "learning_rate": 2.034487447625399e-06, + "loss": 0.5549, + "step": 9755 + }, + { + "epoch": 0.4025748947759346, + "grad_norm": 4.728838166998933, + "learning_rate": 2.034300125672584e-06, + "loss": 0.5227, + "step": 9756 + }, + { + "epoch": 0.4026161591152926, + "grad_norm": 3.1310042607935262, + "learning_rate": 2.0341127941758328e-06, + "loss": 0.5642, + "step": 9757 + }, + { + "epoch": 0.4026574234546505, + "grad_norm": 2.4931486219074257, + "learning_rate": 2.0339254531384895e-06, + "loss": 0.5439, + "step": 9758 + }, + { + "epoch": 0.4026986877940084, + "grad_norm": 2.518286436221257, + "learning_rate": 2.033738102563903e-06, + "loss": 0.5663, + "step": 9759 + }, + { + "epoch": 0.40273995213336633, + "grad_norm": 2.6274055002950525, + "learning_rate": 2.033550742455418e-06, + "loss": 0.5583, + "step": 9760 + }, + { + "epoch": 0.40278121647272425, + "grad_norm": 2.745223042523933, + "learning_rate": 2.0333633728163826e-06, + "loss": 0.5627, + "step": 9761 + }, + { + "epoch": 0.4028224808120822, + "grad_norm": 6.989827651074141, + "learning_rate": 2.0331759936501426e-06, + "loss": 0.5203, + "step": 9762 + }, + { + "epoch": 0.40286374515144013, + "grad_norm": 3.8128664521008937, + "learning_rate": 2.0329886049600462e-06, + "loss": 0.5149, + "step": 9763 + }, + { + "epoch": 0.40290500949079805, + "grad_norm": 2.7679986087654713, + "learning_rate": 2.0328012067494393e-06, + "loss": 0.5393, + "step": 9764 + 
}, + { + "epoch": 0.40294627383015597, + "grad_norm": 9.021327147276276, + "learning_rate": 2.0326137990216705e-06, + "loss": 0.5659, + "step": 9765 + }, + { + "epoch": 0.4029875381695139, + "grad_norm": 5.031680252920477, + "learning_rate": 2.032426381780086e-06, + "loss": 0.5233, + "step": 9766 + }, + { + "epoch": 0.40302880250887185, + "grad_norm": 3.355609719760301, + "learning_rate": 2.0322389550280353e-06, + "loss": 0.5584, + "step": 9767 + }, + { + "epoch": 0.40307006684822977, + "grad_norm": 3.703490459021755, + "learning_rate": 2.032051518768865e-06, + "loss": 0.5049, + "step": 9768 + }, + { + "epoch": 0.4031113311875877, + "grad_norm": 3.467822260123671, + "learning_rate": 2.0318640730059237e-06, + "loss": 0.5364, + "step": 9769 + }, + { + "epoch": 0.4031525955269456, + "grad_norm": 8.184451529468681, + "learning_rate": 2.03167661774256e-06, + "loss": 0.5728, + "step": 9770 + }, + { + "epoch": 0.4031938598663035, + "grad_norm": 2.891467548853537, + "learning_rate": 2.031489152982121e-06, + "loss": 0.5144, + "step": 9771 + }, + { + "epoch": 0.4032351242056615, + "grad_norm": 4.257943600841574, + "learning_rate": 2.031301678727957e-06, + "loss": 0.54, + "step": 9772 + }, + { + "epoch": 0.4032763885450194, + "grad_norm": 3.098090948799596, + "learning_rate": 2.0311141949834154e-06, + "loss": 0.511, + "step": 9773 + }, + { + "epoch": 0.4033176528843773, + "grad_norm": 11.090633642061654, + "learning_rate": 2.030926701751845e-06, + "loss": 0.5741, + "step": 9774 + }, + { + "epoch": 0.40335891722373524, + "grad_norm": 6.039786403632342, + "learning_rate": 2.0307391990365964e-06, + "loss": 0.5142, + "step": 9775 + }, + { + "epoch": 0.40340018156309315, + "grad_norm": 2.181712855805826, + "learning_rate": 2.0305516868410177e-06, + "loss": 0.5451, + "step": 9776 + }, + { + "epoch": 0.4034414459024511, + "grad_norm": 9.30111274943551, + "learning_rate": 2.0303641651684596e-06, + "loss": 0.5684, + "step": 9777 + }, + { + "epoch": 0.40348271024180904, + "grad_norm": 6.344044026247804, + "learning_rate": 2.030176634022269e-06, + "loss": 0.5486, + "step": 9778 + }, + { + "epoch": 0.40352397458116696, + "grad_norm": 2.2507366807638616, + "learning_rate": 2.029989093405799e-06, + "loss": 0.5922, + "step": 9779 + }, + { + "epoch": 0.40356523892052487, + "grad_norm": 5.357947469838538, + "learning_rate": 2.029801543322397e-06, + "loss": 0.5441, + "step": 9780 + }, + { + "epoch": 0.4036065032598828, + "grad_norm": 3.238623349703619, + "learning_rate": 2.0296139837754142e-06, + "loss": 0.5464, + "step": 9781 + }, + { + "epoch": 0.40364776759924076, + "grad_norm": 1.848910281430605, + "learning_rate": 2.0294264147682008e-06, + "loss": 0.5109, + "step": 9782 + }, + { + "epoch": 0.4036890319385987, + "grad_norm": 4.226306708616129, + "learning_rate": 2.0292388363041075e-06, + "loss": 0.4999, + "step": 9783 + }, + { + "epoch": 0.4037302962779566, + "grad_norm": 3.1506904673270784, + "learning_rate": 2.0290512483864846e-06, + "loss": 0.5346, + "step": 9784 + }, + { + "epoch": 0.4037715606173145, + "grad_norm": 10.91644373480505, + "learning_rate": 2.0288636510186827e-06, + "loss": 0.5369, + "step": 9785 + }, + { + "epoch": 0.4038128249566724, + "grad_norm": 2.2569485260307025, + "learning_rate": 2.028676044204053e-06, + "loss": 0.5136, + "step": 9786 + }, + { + "epoch": 0.4038540892960304, + "grad_norm": 5.843282010055226, + "learning_rate": 2.0284884279459467e-06, + "loss": 0.5678, + "step": 9787 + }, + { + "epoch": 0.4038953536353883, + "grad_norm": 3.1353867769781876, + "learning_rate": 
2.0283008022477146e-06, + "loss": 0.5292, + "step": 9788 + }, + { + "epoch": 0.4039366179747462, + "grad_norm": 1.7880587280247604, + "learning_rate": 2.0281131671127093e-06, + "loss": 0.5006, + "step": 9789 + }, + { + "epoch": 0.40397788231410414, + "grad_norm": 1.8634736315869291, + "learning_rate": 2.027925522544281e-06, + "loss": 0.5405, + "step": 9790 + }, + { + "epoch": 0.40401914665346206, + "grad_norm": 2.446859818300465, + "learning_rate": 2.0277378685457825e-06, + "loss": 0.5381, + "step": 9791 + }, + { + "epoch": 0.40406041099282003, + "grad_norm": 4.051517613869881, + "learning_rate": 2.027550205120566e-06, + "loss": 0.5057, + "step": 9792 + }, + { + "epoch": 0.40410167533217795, + "grad_norm": 5.447729208829418, + "learning_rate": 2.0273625322719826e-06, + "loss": 0.578, + "step": 9793 + }, + { + "epoch": 0.40414293967153586, + "grad_norm": 7.040666798015182, + "learning_rate": 2.0271748500033846e-06, + "loss": 0.5257, + "step": 9794 + }, + { + "epoch": 0.4041842040108938, + "grad_norm": 3.157938065692045, + "learning_rate": 2.0269871583181257e-06, + "loss": 0.5324, + "step": 9795 + }, + { + "epoch": 0.4042254683502517, + "grad_norm": 2.8889548579933426, + "learning_rate": 2.0267994572195576e-06, + "loss": 0.5846, + "step": 9796 + }, + { + "epoch": 0.40426673268960966, + "grad_norm": 4.633431160847541, + "learning_rate": 2.026611746711033e-06, + "loss": 0.5559, + "step": 9797 + }, + { + "epoch": 0.4043079970289676, + "grad_norm": 2.947993254946051, + "learning_rate": 2.0264240267959064e-06, + "loss": 0.5457, + "step": 9798 + }, + { + "epoch": 0.4043492613683255, + "grad_norm": 2.7506136267370924, + "learning_rate": 2.0262362974775284e-06, + "loss": 0.4815, + "step": 9799 + }, + { + "epoch": 0.4043905257076834, + "grad_norm": 6.493637939419714, + "learning_rate": 2.0260485587592544e-06, + "loss": 0.524, + "step": 9800 + }, + { + "epoch": 0.40443179004704133, + "grad_norm": 3.250892262022903, + "learning_rate": 2.025860810644437e-06, + "loss": 0.5279, + "step": 9801 + }, + { + "epoch": 0.4044730543863993, + "grad_norm": 2.0157919868784706, + "learning_rate": 2.02567305313643e-06, + "loss": 0.5625, + "step": 9802 + }, + { + "epoch": 0.4045143187257572, + "grad_norm": 3.3775845654021315, + "learning_rate": 2.0254852862385877e-06, + "loss": 0.5571, + "step": 9803 + }, + { + "epoch": 0.40455558306511513, + "grad_norm": 3.691732930422441, + "learning_rate": 2.0252975099542627e-06, + "loss": 0.5315, + "step": 9804 + }, + { + "epoch": 0.40459684740447305, + "grad_norm": 2.616253298873462, + "learning_rate": 2.0251097242868104e-06, + "loss": 0.5452, + "step": 9805 + }, + { + "epoch": 0.40463811174383096, + "grad_norm": 2.0518714034899155, + "learning_rate": 2.0249219292395855e-06, + "loss": 0.5554, + "step": 9806 + }, + { + "epoch": 0.40467937608318894, + "grad_norm": 3.025992508285632, + "learning_rate": 2.024734124815941e-06, + "loss": 0.5964, + "step": 9807 + }, + { + "epoch": 0.40472064042254685, + "grad_norm": 3.408320840842935, + "learning_rate": 2.0245463110192325e-06, + "loss": 0.567, + "step": 9808 + }, + { + "epoch": 0.40476190476190477, + "grad_norm": 2.749567445642318, + "learning_rate": 2.024358487852815e-06, + "loss": 0.5462, + "step": 9809 + }, + { + "epoch": 0.4048031691012627, + "grad_norm": 1.9004200071242938, + "learning_rate": 2.024170655320043e-06, + "loss": 0.5031, + "step": 9810 + }, + { + "epoch": 0.4048444334406206, + "grad_norm": 6.94145812189516, + "learning_rate": 2.023982813424272e-06, + "loss": 0.5505, + "step": 9811 + }, + { + "epoch": 0.4048856977799785, 
+ "grad_norm": 2.63325700499718, + "learning_rate": 2.0237949621688566e-06, + "loss": 0.5339, + "step": 9812 + }, + { + "epoch": 0.4049269621193365, + "grad_norm": 2.825921523886083, + "learning_rate": 2.023607101557153e-06, + "loss": 0.5645, + "step": 9813 + }, + { + "epoch": 0.4049682264586944, + "grad_norm": 2.961053268338715, + "learning_rate": 2.0234192315925173e-06, + "loss": 0.5451, + "step": 9814 + }, + { + "epoch": 0.4050094907980523, + "grad_norm": 2.824282721563104, + "learning_rate": 2.023231352278304e-06, + "loss": 0.5657, + "step": 9815 + }, + { + "epoch": 0.40505075513741023, + "grad_norm": 2.4142871904091168, + "learning_rate": 2.0230434636178705e-06, + "loss": 0.562, + "step": 9816 + }, + { + "epoch": 0.40509201947676815, + "grad_norm": 2.50234171688736, + "learning_rate": 2.0228555656145717e-06, + "loss": 0.5508, + "step": 9817 + }, + { + "epoch": 0.4051332838161261, + "grad_norm": 5.243812920954619, + "learning_rate": 2.022667658271765e-06, + "loss": 0.5607, + "step": 9818 + }, + { + "epoch": 0.40517454815548404, + "grad_norm": 3.1165030060803764, + "learning_rate": 2.0224797415928055e-06, + "loss": 0.4971, + "step": 9819 + }, + { + "epoch": 0.40521581249484195, + "grad_norm": 4.317948842534991, + "learning_rate": 2.022291815581052e-06, + "loss": 0.5738, + "step": 9820 + }, + { + "epoch": 0.40525707683419987, + "grad_norm": 74.9250337237379, + "learning_rate": 2.0221038802398587e-06, + "loss": 0.4983, + "step": 9821 + }, + { + "epoch": 0.4052983411735578, + "grad_norm": 2.3766015773711184, + "learning_rate": 2.0219159355725853e-06, + "loss": 0.5094, + "step": 9822 + }, + { + "epoch": 0.40533960551291576, + "grad_norm": 21.2333081791209, + "learning_rate": 2.0217279815825874e-06, + "loss": 0.5048, + "step": 9823 + }, + { + "epoch": 0.4053808698522737, + "grad_norm": 4.449043352215583, + "learning_rate": 2.0215400182732225e-06, + "loss": 0.518, + "step": 9824 + }, + { + "epoch": 0.4054221341916316, + "grad_norm": 2.685760991286405, + "learning_rate": 2.0213520456478475e-06, + "loss": 0.4674, + "step": 9825 + }, + { + "epoch": 0.4054633985309895, + "grad_norm": 2.4477088619052414, + "learning_rate": 2.0211640637098212e-06, + "loss": 0.5101, + "step": 9826 + }, + { + "epoch": 0.4055046628703474, + "grad_norm": 4.74105344199534, + "learning_rate": 2.0209760724625014e-06, + "loss": 0.5412, + "step": 9827 + }, + { + "epoch": 0.4055459272097054, + "grad_norm": 3.061507266992268, + "learning_rate": 2.020788071909245e-06, + "loss": 0.5055, + "step": 9828 + }, + { + "epoch": 0.4055871915490633, + "grad_norm": 7.9782110252530085, + "learning_rate": 2.0206000620534116e-06, + "loss": 0.5444, + "step": 9829 + }, + { + "epoch": 0.4056284558884212, + "grad_norm": 2.055993924978712, + "learning_rate": 2.020412042898358e-06, + "loss": 0.525, + "step": 9830 + }, + { + "epoch": 0.40566972022777914, + "grad_norm": 4.846570651120075, + "learning_rate": 2.0202240144474437e-06, + "loss": 0.5513, + "step": 9831 + }, + { + "epoch": 0.40571098456713706, + "grad_norm": 13.705991044771627, + "learning_rate": 2.0200359767040273e-06, + "loss": 0.5506, + "step": 9832 + }, + { + "epoch": 0.405752248906495, + "grad_norm": 3.778288586018576, + "learning_rate": 2.019847929671467e-06, + "loss": 0.5282, + "step": 9833 + }, + { + "epoch": 0.40579351324585294, + "grad_norm": 5.223347910443693, + "learning_rate": 2.0196598733531225e-06, + "loss": 0.5165, + "step": 9834 + }, + { + "epoch": 0.40583477758521086, + "grad_norm": 3.6784217097719716, + "learning_rate": 2.0194718077523522e-06, + "loss": 0.5583, + 
"step": 9835 + }, + { + "epoch": 0.4058760419245688, + "grad_norm": 3.4142925516417386, + "learning_rate": 2.0192837328725165e-06, + "loss": 0.5308, + "step": 9836 + }, + { + "epoch": 0.4059173062639267, + "grad_norm": 6.398731621538693, + "learning_rate": 2.019095648716974e-06, + "loss": 0.583, + "step": 9837 + }, + { + "epoch": 0.40595857060328466, + "grad_norm": 49.0820536451318, + "learning_rate": 2.018907555289085e-06, + "loss": 0.5236, + "step": 9838 + }, + { + "epoch": 0.4059998349426426, + "grad_norm": 4.6229152367460875, + "learning_rate": 2.0187194525922087e-06, + "loss": 0.5198, + "step": 9839 + }, + { + "epoch": 0.4060410992820005, + "grad_norm": 3.215881492895259, + "learning_rate": 2.018531340629705e-06, + "loss": 0.4915, + "step": 9840 + }, + { + "epoch": 0.4060823636213584, + "grad_norm": 4.836309349163634, + "learning_rate": 2.0183432194049347e-06, + "loss": 0.5239, + "step": 9841 + }, + { + "epoch": 0.4061236279607163, + "grad_norm": 3.85757506504951, + "learning_rate": 2.018155088921258e-06, + "loss": 0.4991, + "step": 9842 + }, + { + "epoch": 0.4061648923000743, + "grad_norm": 26.782760704429602, + "learning_rate": 2.0179669491820353e-06, + "loss": 0.5266, + "step": 9843 + }, + { + "epoch": 0.4062061566394322, + "grad_norm": 2.8736075672491683, + "learning_rate": 2.0177788001906266e-06, + "loss": 0.5798, + "step": 9844 + }, + { + "epoch": 0.40624742097879013, + "grad_norm": 19.574533771677125, + "learning_rate": 2.0175906419503936e-06, + "loss": 0.5449, + "step": 9845 + }, + { + "epoch": 0.40628868531814805, + "grad_norm": 3.4375031575947017, + "learning_rate": 2.0174024744646965e-06, + "loss": 0.5557, + "step": 9846 + }, + { + "epoch": 0.40632994965750596, + "grad_norm": 2.1856462142660593, + "learning_rate": 2.0172142977368975e-06, + "loss": 0.527, + "step": 9847 + }, + { + "epoch": 0.40637121399686393, + "grad_norm": 2.6112068366844916, + "learning_rate": 2.017026111770357e-06, + "loss": 0.5216, + "step": 9848 + }, + { + "epoch": 0.40641247833622185, + "grad_norm": 3.32032301716782, + "learning_rate": 2.016837916568437e-06, + "loss": 0.5254, + "step": 9849 + }, + { + "epoch": 0.40645374267557977, + "grad_norm": 4.425237724238917, + "learning_rate": 2.016649712134498e-06, + "loss": 0.5269, + "step": 9850 + }, + { + "epoch": 0.4064950070149377, + "grad_norm": 3.1256735738094337, + "learning_rate": 2.0164614984719036e-06, + "loss": 0.5687, + "step": 9851 + }, + { + "epoch": 0.4065362713542956, + "grad_norm": 2.5145366856674864, + "learning_rate": 2.016273275584014e-06, + "loss": 0.4831, + "step": 9852 + }, + { + "epoch": 0.40657753569365357, + "grad_norm": 3.844044990708683, + "learning_rate": 2.016085043474193e-06, + "loss": 0.5001, + "step": 9853 + }, + { + "epoch": 0.4066188000330115, + "grad_norm": 4.005101735570297, + "learning_rate": 2.0158968021458016e-06, + "loss": 0.5274, + "step": 9854 + }, + { + "epoch": 0.4066600643723694, + "grad_norm": 4.989612136475852, + "learning_rate": 2.0157085516022034e-06, + "loss": 0.4942, + "step": 9855 + }, + { + "epoch": 0.4067013287117273, + "grad_norm": 2.6884194337477725, + "learning_rate": 2.0155202918467594e-06, + "loss": 0.5602, + "step": 9856 + }, + { + "epoch": 0.40674259305108523, + "grad_norm": 2.498195626602255, + "learning_rate": 2.015332022882834e-06, + "loss": 0.5752, + "step": 9857 + }, + { + "epoch": 0.4067838573904432, + "grad_norm": 7.0859583882135055, + "learning_rate": 2.015143744713789e-06, + "loss": 0.5067, + "step": 9858 + }, + { + "epoch": 0.4068251217298011, + "grad_norm": 2.5124444113887714, + 
"learning_rate": 2.0149554573429885e-06, + "loss": 0.5143, + "step": 9859 + }, + { + "epoch": 0.40686638606915904, + "grad_norm": 4.286562127359688, + "learning_rate": 2.014767160773795e-06, + "loss": 0.5944, + "step": 9860 + }, + { + "epoch": 0.40690765040851695, + "grad_norm": 2.7189057401692422, + "learning_rate": 2.0145788550095725e-06, + "loss": 0.5305, + "step": 9861 + }, + { + "epoch": 0.40694891474787487, + "grad_norm": 6.497424022476908, + "learning_rate": 2.0143905400536842e-06, + "loss": 0.5313, + "step": 9862 + }, + { + "epoch": 0.40699017908723284, + "grad_norm": 2.525142683282054, + "learning_rate": 2.0142022159094947e-06, + "loss": 0.5268, + "step": 9863 + }, + { + "epoch": 0.40703144342659076, + "grad_norm": 2.4014136440609852, + "learning_rate": 2.0140138825803665e-06, + "loss": 0.5416, + "step": 9864 + }, + { + "epoch": 0.40707270776594867, + "grad_norm": 2.9133519535702233, + "learning_rate": 2.0138255400696646e-06, + "loss": 0.4934, + "step": 9865 + }, + { + "epoch": 0.4071139721053066, + "grad_norm": 3.2264257619812957, + "learning_rate": 2.013637188380753e-06, + "loss": 0.5389, + "step": 9866 + }, + { + "epoch": 0.4071552364446645, + "grad_norm": 17.29027269204821, + "learning_rate": 2.0134488275169966e-06, + "loss": 0.5337, + "step": 9867 + }, + { + "epoch": 0.4071965007840225, + "grad_norm": 2.0181638110356865, + "learning_rate": 2.0132604574817596e-06, + "loss": 0.565, + "step": 9868 + }, + { + "epoch": 0.4072377651233804, + "grad_norm": 2.7363493675764476, + "learning_rate": 2.013072078278407e-06, + "loss": 0.5204, + "step": 9869 + }, + { + "epoch": 0.4072790294627383, + "grad_norm": 3.9383134312247408, + "learning_rate": 2.012883689910303e-06, + "loss": 0.5145, + "step": 9870 + }, + { + "epoch": 0.4073202938020962, + "grad_norm": 3.7018759612549665, + "learning_rate": 2.0126952923808142e-06, + "loss": 0.4916, + "step": 9871 + }, + { + "epoch": 0.40736155814145414, + "grad_norm": 2.658878015240135, + "learning_rate": 2.0125068856933046e-06, + "loss": 0.4336, + "step": 9872 + }, + { + "epoch": 0.40740282248081205, + "grad_norm": 6.682088436931851, + "learning_rate": 2.01231846985114e-06, + "loss": 0.5132, + "step": 9873 + }, + { + "epoch": 0.40744408682017, + "grad_norm": 1.8049961491678648, + "learning_rate": 2.012130044857685e-06, + "loss": 0.5144, + "step": 9874 + }, + { + "epoch": 0.40748535115952794, + "grad_norm": 2.1959617077272533, + "learning_rate": 2.011941610716307e-06, + "loss": 0.5169, + "step": 9875 + }, + { + "epoch": 0.40752661549888586, + "grad_norm": 3.182279272270814, + "learning_rate": 2.0117531674303715e-06, + "loss": 0.5295, + "step": 9876 + }, + { + "epoch": 0.4075678798382438, + "grad_norm": 2.7907759940309744, + "learning_rate": 2.0115647150032432e-06, + "loss": 0.4966, + "step": 9877 + }, + { + "epoch": 0.4076091441776017, + "grad_norm": 4.508526692377851, + "learning_rate": 2.0113762534382905e-06, + "loss": 0.525, + "step": 9878 + }, + { + "epoch": 0.40765040851695966, + "grad_norm": 2.52454924445145, + "learning_rate": 2.0111877827388777e-06, + "loss": 0.5519, + "step": 9879 + }, + { + "epoch": 0.4076916728563176, + "grad_norm": 2.7275571104542524, + "learning_rate": 2.0109993029083726e-06, + "loss": 0.5497, + "step": 9880 + }, + { + "epoch": 0.4077329371956755, + "grad_norm": 2.9399672597865987, + "learning_rate": 2.010810813950142e-06, + "loss": 0.5127, + "step": 9881 + }, + { + "epoch": 0.4077742015350334, + "grad_norm": 3.725103881195383, + "learning_rate": 2.0106223158675523e-06, + "loss": 0.6232, + "step": 9882 + }, + { + "epoch": 
0.4078154658743913, + "grad_norm": 1.9402277339004834, + "learning_rate": 2.01043380866397e-06, + "loss": 0.5348, + "step": 9883 + }, + { + "epoch": 0.4078567302137493, + "grad_norm": 2.425898441603784, + "learning_rate": 2.010245292342764e-06, + "loss": 0.4955, + "step": 9884 + }, + { + "epoch": 0.4078979945531072, + "grad_norm": 2.970667314187023, + "learning_rate": 2.0100567669073e-06, + "loss": 0.4642, + "step": 9885 + }, + { + "epoch": 0.40793925889246513, + "grad_norm": 3.4570728151132197, + "learning_rate": 2.0098682323609457e-06, + "loss": 0.5035, + "step": 9886 + }, + { + "epoch": 0.40798052323182304, + "grad_norm": 5.872477316754227, + "learning_rate": 2.00967968870707e-06, + "loss": 0.5089, + "step": 9887 + }, + { + "epoch": 0.40802178757118096, + "grad_norm": 3.637878852347928, + "learning_rate": 2.00949113594904e-06, + "loss": 0.5366, + "step": 9888 + }, + { + "epoch": 0.40806305191053893, + "grad_norm": 2.4948540523113727, + "learning_rate": 2.0093025740902236e-06, + "loss": 0.598, + "step": 9889 + }, + { + "epoch": 0.40810431624989685, + "grad_norm": 3.4430373952600775, + "learning_rate": 2.0091140031339892e-06, + "loss": 0.5302, + "step": 9890 + }, + { + "epoch": 0.40814558058925476, + "grad_norm": 8.311496502767307, + "learning_rate": 2.008925423083705e-06, + "loss": 0.525, + "step": 9891 + }, + { + "epoch": 0.4081868449286127, + "grad_norm": 4.878963756961117, + "learning_rate": 2.00873683394274e-06, + "loss": 0.556, + "step": 9892 + }, + { + "epoch": 0.4082281092679706, + "grad_norm": 3.1707468730567627, + "learning_rate": 2.0085482357144618e-06, + "loss": 0.5353, + "step": 9893 + }, + { + "epoch": 0.40826937360732857, + "grad_norm": 6.618508172058683, + "learning_rate": 2.0083596284022405e-06, + "loss": 0.4501, + "step": 9894 + }, + { + "epoch": 0.4083106379466865, + "grad_norm": 3.5609869652900525, + "learning_rate": 2.008171012009444e-06, + "loss": 0.5705, + "step": 9895 + }, + { + "epoch": 0.4083519022860444, + "grad_norm": 8.137142173213721, + "learning_rate": 2.0079823865394426e-06, + "loss": 0.5276, + "step": 9896 + }, + { + "epoch": 0.4083931666254023, + "grad_norm": 173.9573390864019, + "learning_rate": 2.0077937519956046e-06, + "loss": 0.5162, + "step": 9897 + }, + { + "epoch": 0.40843443096476023, + "grad_norm": 4.119448581222983, + "learning_rate": 2.0076051083813e-06, + "loss": 0.5049, + "step": 9898 + }, + { + "epoch": 0.4084756953041182, + "grad_norm": 1.9969817112970822, + "learning_rate": 2.0074164556998984e-06, + "loss": 0.5363, + "step": 9899 + }, + { + "epoch": 0.4085169596434761, + "grad_norm": 2.520102273003962, + "learning_rate": 2.007227793954769e-06, + "loss": 0.5555, + "step": 9900 + }, + { + "epoch": 0.40855822398283403, + "grad_norm": 3.321918065445458, + "learning_rate": 2.007039123149283e-06, + "loss": 0.4938, + "step": 9901 + }, + { + "epoch": 0.40859948832219195, + "grad_norm": 2.0933600325967125, + "learning_rate": 2.0068504432868097e-06, + "loss": 0.5519, + "step": 9902 + }, + { + "epoch": 0.40864075266154987, + "grad_norm": 10.746472879840795, + "learning_rate": 2.0066617543707194e-06, + "loss": 0.5349, + "step": 9903 + }, + { + "epoch": 0.40868201700090784, + "grad_norm": 2.9999228551173878, + "learning_rate": 2.0064730564043827e-06, + "loss": 0.5023, + "step": 9904 + }, + { + "epoch": 0.40872328134026575, + "grad_norm": 2.2288211442093813, + "learning_rate": 2.00628434939117e-06, + "loss": 0.4736, + "step": 9905 + }, + { + "epoch": 0.40876454567962367, + "grad_norm": 2.601732218583207, + "learning_rate": 2.0060956333344525e-06, + 
"loss": 0.512, + "step": 9906 + }, + { + "epoch": 0.4088058100189816, + "grad_norm": 6.565719546567912, + "learning_rate": 2.005906908237601e-06, + "loss": 0.5431, + "step": 9907 + }, + { + "epoch": 0.4088470743583395, + "grad_norm": 3.638732179200316, + "learning_rate": 2.005718174103986e-06, + "loss": 0.5379, + "step": 9908 + }, + { + "epoch": 0.4088883386976975, + "grad_norm": 6.582695598134508, + "learning_rate": 2.0055294309369806e-06, + "loss": 0.5173, + "step": 9909 + }, + { + "epoch": 0.4089296030370554, + "grad_norm": 2.776853867837138, + "learning_rate": 2.005340678739954e-06, + "loss": 0.5439, + "step": 9910 + }, + { + "epoch": 0.4089708673764133, + "grad_norm": 5.88913238959001, + "learning_rate": 2.0051519175162783e-06, + "loss": 0.5446, + "step": 9911 + }, + { + "epoch": 0.4090121317157712, + "grad_norm": 3.0136471118564683, + "learning_rate": 2.0049631472693265e-06, + "loss": 0.552, + "step": 9912 + }, + { + "epoch": 0.40905339605512914, + "grad_norm": 4.294233433521441, + "learning_rate": 2.004774368002469e-06, + "loss": 0.5743, + "step": 9913 + }, + { + "epoch": 0.4090946603944871, + "grad_norm": 5.359095324686034, + "learning_rate": 2.004585579719079e-06, + "loss": 0.544, + "step": 9914 + }, + { + "epoch": 0.409135924733845, + "grad_norm": 4.024082325776465, + "learning_rate": 2.004396782422528e-06, + "loss": 0.5647, + "step": 9915 + }, + { + "epoch": 0.40917718907320294, + "grad_norm": 3.2181528083581155, + "learning_rate": 2.004207976116189e-06, + "loss": 0.568, + "step": 9916 + }, + { + "epoch": 0.40921845341256086, + "grad_norm": 1.9386146059236842, + "learning_rate": 2.004019160803434e-06, + "loss": 0.4571, + "step": 9917 + }, + { + "epoch": 0.4092597177519188, + "grad_norm": 3.5475097917088734, + "learning_rate": 2.0038303364876357e-06, + "loss": 0.5571, + "step": 9918 + }, + { + "epoch": 0.40930098209127674, + "grad_norm": 8.352510989902857, + "learning_rate": 2.0036415031721676e-06, + "loss": 0.5616, + "step": 9919 + }, + { + "epoch": 0.40934224643063466, + "grad_norm": 5.754801183980683, + "learning_rate": 2.0034526608604023e-06, + "loss": 0.5452, + "step": 9920 + }, + { + "epoch": 0.4093835107699926, + "grad_norm": 3.4483974894927196, + "learning_rate": 2.0032638095557128e-06, + "loss": 0.6112, + "step": 9921 + }, + { + "epoch": 0.4094247751093505, + "grad_norm": 3.874215467429143, + "learning_rate": 2.0030749492614727e-06, + "loss": 0.5391, + "step": 9922 + }, + { + "epoch": 0.4094660394487084, + "grad_norm": 3.9927288470204028, + "learning_rate": 2.002886079981056e-06, + "loss": 0.5501, + "step": 9923 + }, + { + "epoch": 0.4095073037880664, + "grad_norm": 6.088858472778181, + "learning_rate": 2.002697201717835e-06, + "loss": 0.5321, + "step": 9924 + }, + { + "epoch": 0.4095485681274243, + "grad_norm": 2.261156332828991, + "learning_rate": 2.002508314475185e-06, + "loss": 0.5497, + "step": 9925 + }, + { + "epoch": 0.4095898324667822, + "grad_norm": 3.7037415107026415, + "learning_rate": 2.0023194182564793e-06, + "loss": 0.5435, + "step": 9926 + }, + { + "epoch": 0.4096310968061401, + "grad_norm": 2.4295723387644355, + "learning_rate": 2.002130513065092e-06, + "loss": 0.536, + "step": 9927 + }, + { + "epoch": 0.40967236114549804, + "grad_norm": 3.0575896582653934, + "learning_rate": 2.001941598904398e-06, + "loss": 0.5414, + "step": 9928 + }, + { + "epoch": 0.409713625484856, + "grad_norm": 4.803297901744025, + "learning_rate": 2.0017526757777715e-06, + "loss": 0.5477, + "step": 9929 + }, + { + "epoch": 0.40975488982421393, + "grad_norm": 3.1121396951931923, + 
"learning_rate": 2.0015637436885864e-06, + "loss": 0.5413, + "step": 9930 + }, + { + "epoch": 0.40979615416357185, + "grad_norm": 5.91014872963361, + "learning_rate": 2.0013748026402183e-06, + "loss": 0.5518, + "step": 9931 + }, + { + "epoch": 0.40983741850292976, + "grad_norm": 4.406838875487524, + "learning_rate": 2.0011858526360416e-06, + "loss": 0.5379, + "step": 9932 + }, + { + "epoch": 0.4098786828422877, + "grad_norm": 4.757784919862134, + "learning_rate": 2.0009968936794327e-06, + "loss": 0.5112, + "step": 9933 + }, + { + "epoch": 0.4099199471816456, + "grad_norm": 2.2496104807526085, + "learning_rate": 2.0008079257737657e-06, + "loss": 0.5193, + "step": 9934 + }, + { + "epoch": 0.40996121152100357, + "grad_norm": 6.003422338125636, + "learning_rate": 2.0006189489224164e-06, + "loss": 0.5224, + "step": 9935 + }, + { + "epoch": 0.4100024758603615, + "grad_norm": 4.612589472840221, + "learning_rate": 2.0004299631287595e-06, + "loss": 0.5631, + "step": 9936 + }, + { + "epoch": 0.4100437401997194, + "grad_norm": 3.8643463592090206, + "learning_rate": 2.0002409683961724e-06, + "loss": 0.5435, + "step": 9937 + }, + { + "epoch": 0.4100850045390773, + "grad_norm": 5.417003864169672, + "learning_rate": 2.00005196472803e-06, + "loss": 0.5702, + "step": 9938 + }, + { + "epoch": 0.41012626887843523, + "grad_norm": 3.364306261458697, + "learning_rate": 1.999862952127709e-06, + "loss": 0.5493, + "step": 9939 + }, + { + "epoch": 0.4101675332177932, + "grad_norm": 3.4841027683550823, + "learning_rate": 1.999673930598584e-06, + "loss": 0.6243, + "step": 9940 + }, + { + "epoch": 0.4102087975571511, + "grad_norm": 5.503217420488023, + "learning_rate": 1.9994849001440337e-06, + "loss": 0.4998, + "step": 9941 + }, + { + "epoch": 0.41025006189650903, + "grad_norm": 3.6397840089376903, + "learning_rate": 1.999295860767433e-06, + "loss": 0.493, + "step": 9942 + }, + { + "epoch": 0.41029132623586695, + "grad_norm": 2.8381921071046787, + "learning_rate": 1.9991068124721597e-06, + "loss": 0.516, + "step": 9943 + }, + { + "epoch": 0.41033259057522486, + "grad_norm": 3.568331501065111, + "learning_rate": 1.9989177552615898e-06, + "loss": 0.5237, + "step": 9944 + }, + { + "epoch": 0.41037385491458284, + "grad_norm": 3.624160552724965, + "learning_rate": 1.998728689139101e-06, + "loss": 0.5393, + "step": 9945 + }, + { + "epoch": 0.41041511925394075, + "grad_norm": 8.257364611115284, + "learning_rate": 1.9985396141080695e-06, + "loss": 0.547, + "step": 9946 + }, + { + "epoch": 0.41045638359329867, + "grad_norm": 3.264051132529727, + "learning_rate": 1.9983505301718737e-06, + "loss": 0.5181, + "step": 9947 + }, + { + "epoch": 0.4104976479326566, + "grad_norm": 2.6253024888819745, + "learning_rate": 1.9981614373338906e-06, + "loss": 0.525, + "step": 9948 + }, + { + "epoch": 0.4105389122720145, + "grad_norm": 6.616831143164065, + "learning_rate": 1.9979723355974986e-06, + "loss": 0.5658, + "step": 9949 + }, + { + "epoch": 0.41058017661137247, + "grad_norm": 8.065394880716237, + "learning_rate": 1.9977832249660744e-06, + "loss": 0.4626, + "step": 9950 + }, + { + "epoch": 0.4106214409507304, + "grad_norm": 5.699744831647997, + "learning_rate": 1.997594105442997e-06, + "loss": 0.5482, + "step": 9951 + }, + { + "epoch": 0.4106627052900883, + "grad_norm": 5.0908056508642305, + "learning_rate": 1.9974049770316433e-06, + "loss": 0.5564, + "step": 9952 + }, + { + "epoch": 0.4107039696294462, + "grad_norm": 5.322384416461939, + "learning_rate": 1.9972158397353932e-06, + "loss": 0.4842, + "step": 9953 + }, + { + "epoch": 
0.41074523396880414, + "grad_norm": 4.532924006273419, + "learning_rate": 1.997026693557624e-06, + "loss": 0.478, + "step": 9954 + }, + { + "epoch": 0.4107864983081621, + "grad_norm": 3.4700118657143193, + "learning_rate": 1.9968375385017147e-06, + "loss": 0.5414, + "step": 9955 + }, + { + "epoch": 0.41082776264752, + "grad_norm": 4.385559004393895, + "learning_rate": 1.996648374571044e-06, + "loss": 0.5425, + "step": 9956 + }, + { + "epoch": 0.41086902698687794, + "grad_norm": 11.150855475851541, + "learning_rate": 1.996459201768991e-06, + "loss": 0.5496, + "step": 9957 + }, + { + "epoch": 0.41091029132623585, + "grad_norm": 2.8777180043484965, + "learning_rate": 1.9962700200989347e-06, + "loss": 0.4978, + "step": 9958 + }, + { + "epoch": 0.41095155566559377, + "grad_norm": 4.620684639731055, + "learning_rate": 1.9960808295642544e-06, + "loss": 0.5082, + "step": 9959 + }, + { + "epoch": 0.41099282000495174, + "grad_norm": 2.6919014409790845, + "learning_rate": 1.9958916301683293e-06, + "loss": 0.5399, + "step": 9960 + }, + { + "epoch": 0.41103408434430966, + "grad_norm": 4.562537543719473, + "learning_rate": 1.9957024219145398e-06, + "loss": 0.5259, + "step": 9961 + }, + { + "epoch": 0.4110753486836676, + "grad_norm": 2.9374224964938707, + "learning_rate": 1.9955132048062642e-06, + "loss": 0.531, + "step": 9962 + }, + { + "epoch": 0.4111166130230255, + "grad_norm": 8.901428522998089, + "learning_rate": 1.995323978846883e-06, + "loss": 0.5297, + "step": 9963 + }, + { + "epoch": 0.4111578773623834, + "grad_norm": 4.443054326152097, + "learning_rate": 1.9951347440397775e-06, + "loss": 0.5589, + "step": 9964 + }, + { + "epoch": 0.4111991417017414, + "grad_norm": 2.5592476519711997, + "learning_rate": 1.994945500388326e-06, + "loss": 0.5128, + "step": 9965 + }, + { + "epoch": 0.4112404060410993, + "grad_norm": 3.3858097403948837, + "learning_rate": 1.99475624789591e-06, + "loss": 0.5876, + "step": 9966 + }, + { + "epoch": 0.4112816703804572, + "grad_norm": 3.4408858717938116, + "learning_rate": 1.9945669865659097e-06, + "loss": 0.5796, + "step": 9967 + }, + { + "epoch": 0.4113229347198151, + "grad_norm": 2.9103194553764817, + "learning_rate": 1.9943777164017058e-06, + "loss": 0.479, + "step": 9968 + }, + { + "epoch": 0.41136419905917304, + "grad_norm": 7.408143974631481, + "learning_rate": 1.994188437406679e-06, + "loss": 0.5352, + "step": 9969 + }, + { + "epoch": 0.411405463398531, + "grad_norm": 3.4866459669248173, + "learning_rate": 1.9939991495842106e-06, + "loss": 0.5421, + "step": 9970 + }, + { + "epoch": 0.41144672773788893, + "grad_norm": 2.8263057733892643, + "learning_rate": 1.993809852937681e-06, + "loss": 0.5494, + "step": 9971 + }, + { + "epoch": 0.41148799207724684, + "grad_norm": 4.137482025792922, + "learning_rate": 1.993620547470473e-06, + "loss": 0.486, + "step": 9972 + }, + { + "epoch": 0.41152925641660476, + "grad_norm": 4.508606126727425, + "learning_rate": 1.9934312331859665e-06, + "loss": 0.5783, + "step": 9973 + }, + { + "epoch": 0.4115705207559627, + "grad_norm": 2.9263599681353645, + "learning_rate": 1.9932419100875447e-06, + "loss": 0.5375, + "step": 9974 + }, + { + "epoch": 0.41161178509532065, + "grad_norm": 4.565624274451394, + "learning_rate": 1.9930525781785877e-06, + "loss": 0.5708, + "step": 9975 + }, + { + "epoch": 0.41165304943467856, + "grad_norm": 4.227183011000649, + "learning_rate": 1.9928632374624787e-06, + "loss": 0.5445, + "step": 9976 + }, + { + "epoch": 0.4116943137740365, + "grad_norm": 3.6063436921655394, + "learning_rate": 1.992673887942599e-06, 
+ "loss": 0.5329, + "step": 9977 + }, + { + "epoch": 0.4117355781133944, + "grad_norm": 4.844861389400679, + "learning_rate": 1.992484529622331e-06, + "loss": 0.5292, + "step": 9978 + }, + { + "epoch": 0.4117768424527523, + "grad_norm": 2.258208207673265, + "learning_rate": 1.9922951625050574e-06, + "loss": 0.537, + "step": 9979 + }, + { + "epoch": 0.4118181067921103, + "grad_norm": 3.022376412811748, + "learning_rate": 1.992105786594161e-06, + "loss": 0.5831, + "step": 9980 + }, + { + "epoch": 0.4118593711314682, + "grad_norm": 4.384075187579641, + "learning_rate": 1.991916401893024e-06, + "loss": 0.5495, + "step": 9981 + }, + { + "epoch": 0.4119006354708261, + "grad_norm": 8.904525294325444, + "learning_rate": 1.991727008405029e-06, + "loss": 0.6166, + "step": 9982 + }, + { + "epoch": 0.41194189981018403, + "grad_norm": 9.333031757925855, + "learning_rate": 1.99153760613356e-06, + "loss": 0.5466, + "step": 9983 + }, + { + "epoch": 0.41198316414954195, + "grad_norm": 3.1711875524012867, + "learning_rate": 1.991348195082e-06, + "loss": 0.583, + "step": 9984 + }, + { + "epoch": 0.4120244284888999, + "grad_norm": 2.865427267038424, + "learning_rate": 1.9911587752537314e-06, + "loss": 0.4748, + "step": 9985 + }, + { + "epoch": 0.41206569282825783, + "grad_norm": 2.975463972307059, + "learning_rate": 1.9909693466521388e-06, + "loss": 0.5302, + "step": 9986 + }, + { + "epoch": 0.41210695716761575, + "grad_norm": 3.7919900279285494, + "learning_rate": 1.9907799092806054e-06, + "loss": 0.5472, + "step": 9987 + }, + { + "epoch": 0.41214822150697367, + "grad_norm": 3.0542871077673808, + "learning_rate": 1.9905904631425153e-06, + "loss": 0.5253, + "step": 9988 + }, + { + "epoch": 0.4121894858463316, + "grad_norm": 9.85590398899029, + "learning_rate": 1.990401008241252e-06, + "loss": 0.5238, + "step": 9989 + }, + { + "epoch": 0.41223075018568955, + "grad_norm": 2.538846975091384, + "learning_rate": 1.9902115445801997e-06, + "loss": 0.5828, + "step": 9990 + }, + { + "epoch": 0.41227201452504747, + "grad_norm": 6.290893138806372, + "learning_rate": 1.990022072162743e-06, + "loss": 0.533, + "step": 9991 + }, + { + "epoch": 0.4123132788644054, + "grad_norm": 4.466274729302091, + "learning_rate": 1.9898325909922663e-06, + "loss": 0.5296, + "step": 9992 + }, + { + "epoch": 0.4123545432037633, + "grad_norm": 4.496418919065471, + "learning_rate": 1.989643101072154e-06, + "loss": 0.5272, + "step": 9993 + }, + { + "epoch": 0.4123958075431212, + "grad_norm": 3.9714189776236783, + "learning_rate": 1.9894536024057913e-06, + "loss": 0.557, + "step": 9994 + }, + { + "epoch": 0.41243707188247913, + "grad_norm": 3.7499936210950144, + "learning_rate": 1.989264094996563e-06, + "loss": 0.4992, + "step": 9995 + }, + { + "epoch": 0.4124783362218371, + "grad_norm": 7.892033797952546, + "learning_rate": 1.9890745788478533e-06, + "loss": 0.495, + "step": 9996 + }, + { + "epoch": 0.412519600561195, + "grad_norm": 5.066188982706094, + "learning_rate": 1.988885053963049e-06, + "loss": 0.5015, + "step": 9997 + }, + { + "epoch": 0.41256086490055294, + "grad_norm": 4.865714730743687, + "learning_rate": 1.9886955203455343e-06, + "loss": 0.5266, + "step": 9998 + }, + { + "epoch": 0.41260212923991085, + "grad_norm": 4.775345605432494, + "learning_rate": 1.988505977998695e-06, + "loss": 0.4942, + "step": 9999 + }, + { + "epoch": 0.41264339357926877, + "grad_norm": 2.700264195829272, + "learning_rate": 1.9883164269259173e-06, + "loss": 0.6012, + "step": 10000 + }, + { + "epoch": 0.41268465791862674, + "grad_norm": 5.861115502056389, + 
"learning_rate": 1.988126867130586e-06, + "loss": 0.4557, + "step": 10001 + }, + { + "epoch": 0.41272592225798466, + "grad_norm": 3.1919147797525222, + "learning_rate": 1.9879372986160878e-06, + "loss": 0.5661, + "step": 10002 + }, + { + "epoch": 0.4127671865973426, + "grad_norm": 3.5317305567824056, + "learning_rate": 1.987747721385809e-06, + "loss": 0.5699, + "step": 10003 + }, + { + "epoch": 0.4128084509367005, + "grad_norm": 4.351342609658555, + "learning_rate": 1.9875581354431356e-06, + "loss": 0.488, + "step": 10004 + }, + { + "epoch": 0.4128497152760584, + "grad_norm": 13.210241448004156, + "learning_rate": 1.9873685407914545e-06, + "loss": 0.5322, + "step": 10005 + }, + { + "epoch": 0.4128909796154164, + "grad_norm": 4.617450778118556, + "learning_rate": 1.9871789374341518e-06, + "loss": 0.4744, + "step": 10006 + }, + { + "epoch": 0.4129322439547743, + "grad_norm": 2.9541280085199544, + "learning_rate": 1.9869893253746146e-06, + "loss": 0.5629, + "step": 10007 + }, + { + "epoch": 0.4129735082941322, + "grad_norm": 11.44504583612414, + "learning_rate": 1.98679970461623e-06, + "loss": 0.5363, + "step": 10008 + }, + { + "epoch": 0.4130147726334901, + "grad_norm": 3.7334354537209915, + "learning_rate": 1.9866100751623847e-06, + "loss": 0.6437, + "step": 10009 + }, + { + "epoch": 0.41305603697284804, + "grad_norm": 3.302633177559616, + "learning_rate": 1.986420437016466e-06, + "loss": 0.56, + "step": 10010 + }, + { + "epoch": 0.413097301312206, + "grad_norm": 2.902633524006251, + "learning_rate": 1.986230790181862e-06, + "loss": 0.5918, + "step": 10011 + }, + { + "epoch": 0.4131385656515639, + "grad_norm": 8.199861977505536, + "learning_rate": 1.986041134661959e-06, + "loss": 0.5235, + "step": 10012 + }, + { + "epoch": 0.41317982999092184, + "grad_norm": 3.2837626057369356, + "learning_rate": 1.9858514704601463e-06, + "loss": 0.5999, + "step": 10013 + }, + { + "epoch": 0.41322109433027976, + "grad_norm": 2.7362823565788537, + "learning_rate": 1.98566179757981e-06, + "loss": 0.5445, + "step": 10014 + }, + { + "epoch": 0.4132623586696377, + "grad_norm": 3.3405367439954183, + "learning_rate": 1.9854721160243394e-06, + "loss": 0.5124, + "step": 10015 + }, + { + "epoch": 0.41330362300899565, + "grad_norm": 33.042714388964384, + "learning_rate": 1.9852824257971223e-06, + "loss": 0.5246, + "step": 10016 + }, + { + "epoch": 0.41334488734835356, + "grad_norm": 3.458360836287872, + "learning_rate": 1.9850927269015475e-06, + "loss": 0.5567, + "step": 10017 + }, + { + "epoch": 0.4133861516877115, + "grad_norm": 4.425336391319824, + "learning_rate": 1.9849030193410026e-06, + "loss": 0.5053, + "step": 10018 + }, + { + "epoch": 0.4134274160270694, + "grad_norm": 2.5368389002948746, + "learning_rate": 1.984713303118877e-06, + "loss": 0.518, + "step": 10019 + }, + { + "epoch": 0.4134686803664273, + "grad_norm": 13.79937911923059, + "learning_rate": 1.984523578238559e-06, + "loss": 0.564, + "step": 10020 + }, + { + "epoch": 0.4135099447057853, + "grad_norm": 3.558498419898969, + "learning_rate": 1.984333844703438e-06, + "loss": 0.5158, + "step": 10021 + }, + { + "epoch": 0.4135512090451432, + "grad_norm": 6.805300278267488, + "learning_rate": 1.984144102516903e-06, + "loss": 0.6051, + "step": 10022 + }, + { + "epoch": 0.4135924733845011, + "grad_norm": 6.898345178860106, + "learning_rate": 1.983954351682343e-06, + "loss": 0.5217, + "step": 10023 + }, + { + "epoch": 0.41363373772385903, + "grad_norm": 25.01908629340869, + "learning_rate": 1.9837645922031476e-06, + "loss": 0.5413, + "step": 10024 + }, + { 
+ "epoch": 0.41367500206321695, + "grad_norm": 4.1137337482477925, + "learning_rate": 1.9835748240827065e-06, + "loss": 0.5495, + "step": 10025 + }, + { + "epoch": 0.4137162664025749, + "grad_norm": 6.72857748118305, + "learning_rate": 1.9833850473244094e-06, + "loss": 0.5457, + "step": 10026 + }, + { + "epoch": 0.41375753074193283, + "grad_norm": 3.7443579397009836, + "learning_rate": 1.983195261931646e-06, + "loss": 0.57, + "step": 10027 + }, + { + "epoch": 0.41379879508129075, + "grad_norm": 6.508963764383566, + "learning_rate": 1.9830054679078062e-06, + "loss": 0.6463, + "step": 10028 + }, + { + "epoch": 0.41384005942064866, + "grad_norm": 4.66489329430007, + "learning_rate": 1.982815665256281e-06, + "loss": 0.5452, + "step": 10029 + }, + { + "epoch": 0.4138813237600066, + "grad_norm": 3.55646690291256, + "learning_rate": 1.9826258539804596e-06, + "loss": 0.5059, + "step": 10030 + }, + { + "epoch": 0.41392258809936455, + "grad_norm": 2.7487665865213566, + "learning_rate": 1.9824360340837334e-06, + "loss": 0.5575, + "step": 10031 + }, + { + "epoch": 0.41396385243872247, + "grad_norm": 6.93281192271388, + "learning_rate": 1.982246205569493e-06, + "loss": 0.5249, + "step": 10032 + }, + { + "epoch": 0.4140051167780804, + "grad_norm": 3.3255792049340442, + "learning_rate": 1.9820563684411286e-06, + "loss": 0.4851, + "step": 10033 + }, + { + "epoch": 0.4140463811174383, + "grad_norm": 7.376765295294819, + "learning_rate": 1.981866522702032e-06, + "loss": 0.5493, + "step": 10034 + }, + { + "epoch": 0.4140876454567962, + "grad_norm": 2.511131172489835, + "learning_rate": 1.981676668355593e-06, + "loss": 0.5026, + "step": 10035 + }, + { + "epoch": 0.4141289097961542, + "grad_norm": 6.070359173432048, + "learning_rate": 1.9814868054052044e-06, + "loss": 0.5633, + "step": 10036 + }, + { + "epoch": 0.4141701741355121, + "grad_norm": 3.459929004565779, + "learning_rate": 1.981296933854257e-06, + "loss": 0.5521, + "step": 10037 + }, + { + "epoch": 0.41421143847487, + "grad_norm": 4.444920671812053, + "learning_rate": 1.9811070537061418e-06, + "loss": 0.4956, + "step": 10038 + }, + { + "epoch": 0.41425270281422794, + "grad_norm": 5.363405225311239, + "learning_rate": 1.9809171649642517e-06, + "loss": 0.5108, + "step": 10039 + }, + { + "epoch": 0.41429396715358585, + "grad_norm": 3.374917554298036, + "learning_rate": 1.980727267631978e-06, + "loss": 0.5058, + "step": 10040 + }, + { + "epoch": 0.4143352314929438, + "grad_norm": 2.5680603482366453, + "learning_rate": 1.980537361712712e-06, + "loss": 0.534, + "step": 10041 + }, + { + "epoch": 0.41437649583230174, + "grad_norm": 2.967823225377798, + "learning_rate": 1.9803474472098467e-06, + "loss": 0.5498, + "step": 10042 + }, + { + "epoch": 0.41441776017165965, + "grad_norm": 5.704168506101794, + "learning_rate": 1.9801575241267748e-06, + "loss": 0.5653, + "step": 10043 + }, + { + "epoch": 0.41445902451101757, + "grad_norm": 2.1141531578059833, + "learning_rate": 1.9799675924668882e-06, + "loss": 0.5357, + "step": 10044 + }, + { + "epoch": 0.4145002888503755, + "grad_norm": 3.215326395185507, + "learning_rate": 1.97977765223358e-06, + "loss": 0.5747, + "step": 10045 + }, + { + "epoch": 0.41454155318973346, + "grad_norm": 3.666376301256413, + "learning_rate": 1.9795877034302424e-06, + "loss": 0.4573, + "step": 10046 + }, + { + "epoch": 0.4145828175290914, + "grad_norm": 2.7237883736913737, + "learning_rate": 1.9793977460602682e-06, + "loss": 0.5412, + "step": 10047 + }, + { + "epoch": 0.4146240818684493, + "grad_norm": 2.889552469123142, + 
"learning_rate": 1.979207780127052e-06, + "loss": 0.5523, + "step": 10048 + }, + { + "epoch": 0.4146653462078072, + "grad_norm": 6.57725485364314, + "learning_rate": 1.9790178056339852e-06, + "loss": 0.5197, + "step": 10049 + }, + { + "epoch": 0.4147066105471651, + "grad_norm": 7.058128065099846, + "learning_rate": 1.9788278225844624e-06, + "loss": 0.5852, + "step": 10050 + }, + { + "epoch": 0.4147478748865231, + "grad_norm": 4.911435652480269, + "learning_rate": 1.9786378309818766e-06, + "loss": 0.5353, + "step": 10051 + }, + { + "epoch": 0.414789139225881, + "grad_norm": 2.410177066200337, + "learning_rate": 1.9784478308296222e-06, + "loss": 0.5228, + "step": 10052 + }, + { + "epoch": 0.4148304035652389, + "grad_norm": 2.5310246494496, + "learning_rate": 1.9782578221310926e-06, + "loss": 0.5325, + "step": 10053 + }, + { + "epoch": 0.41487166790459684, + "grad_norm": 3.926303136949977, + "learning_rate": 1.9780678048896817e-06, + "loss": 0.5516, + "step": 10054 + }, + { + "epoch": 0.41491293224395476, + "grad_norm": 3.0263935176951735, + "learning_rate": 1.9778777791087837e-06, + "loss": 0.5644, + "step": 10055 + }, + { + "epoch": 0.41495419658331273, + "grad_norm": 3.943600794523656, + "learning_rate": 1.9776877447917934e-06, + "loss": 0.5191, + "step": 10056 + }, + { + "epoch": 0.41499546092267064, + "grad_norm": 3.0183715227736156, + "learning_rate": 1.977497701942105e-06, + "loss": 0.5514, + "step": 10057 + }, + { + "epoch": 0.41503672526202856, + "grad_norm": 2.5923728272084836, + "learning_rate": 1.977307650563113e-06, + "loss": 0.5107, + "step": 10058 + }, + { + "epoch": 0.4150779896013865, + "grad_norm": 2.5445702139378636, + "learning_rate": 1.9771175906582125e-06, + "loss": 0.5513, + "step": 10059 + }, + { + "epoch": 0.4151192539407444, + "grad_norm": 2.71275040763935, + "learning_rate": 1.976927522230798e-06, + "loss": 0.5709, + "step": 10060 + }, + { + "epoch": 0.4151605182801023, + "grad_norm": 2.551510668379543, + "learning_rate": 1.976737445284265e-06, + "loss": 0.5315, + "step": 10061 + }, + { + "epoch": 0.4152017826194603, + "grad_norm": 2.2822039540036116, + "learning_rate": 1.9765473598220086e-06, + "loss": 0.5257, + "step": 10062 + }, + { + "epoch": 0.4152430469588182, + "grad_norm": 3.6922934737932636, + "learning_rate": 1.976357265847424e-06, + "loss": 0.5475, + "step": 10063 + }, + { + "epoch": 0.4152843112981761, + "grad_norm": 4.934868394515991, + "learning_rate": 1.976167163363907e-06, + "loss": 0.5274, + "step": 10064 + }, + { + "epoch": 0.41532557563753403, + "grad_norm": 2.2403048583581127, + "learning_rate": 1.9759770523748536e-06, + "loss": 0.5774, + "step": 10065 + }, + { + "epoch": 0.41536683997689194, + "grad_norm": 4.715703530131666, + "learning_rate": 1.9757869328836593e-06, + "loss": 0.4944, + "step": 10066 + }, + { + "epoch": 0.4154081043162499, + "grad_norm": 5.924671557259179, + "learning_rate": 1.97559680489372e-06, + "loss": 0.5976, + "step": 10067 + }, + { + "epoch": 0.41544936865560783, + "grad_norm": 7.767756238370824, + "learning_rate": 1.975406668408432e-06, + "loss": 0.5375, + "step": 10068 + }, + { + "epoch": 0.41549063299496575, + "grad_norm": 2.889913720808006, + "learning_rate": 1.9752165234311915e-06, + "loss": 0.5748, + "step": 10069 + }, + { + "epoch": 0.41553189733432366, + "grad_norm": 2.2933772170967126, + "learning_rate": 1.975026369965395e-06, + "loss": 0.4692, + "step": 10070 + }, + { + "epoch": 0.4155731616736816, + "grad_norm": 3.4420003603841463, + "learning_rate": 1.9748362080144396e-06, + "loss": 0.5167, + "step": 10071 + 
}, + { + "epoch": 0.41561442601303955, + "grad_norm": 7.57685988686958, + "learning_rate": 1.974646037581721e-06, + "loss": 0.5857, + "step": 10072 + }, + { + "epoch": 0.41565569035239747, + "grad_norm": 2.4706386101823123, + "learning_rate": 1.9744558586706376e-06, + "loss": 0.5629, + "step": 10073 + }, + { + "epoch": 0.4156969546917554, + "grad_norm": 2.9381905957381407, + "learning_rate": 1.9742656712845853e-06, + "loss": 0.5837, + "step": 10074 + }, + { + "epoch": 0.4157382190311133, + "grad_norm": 3.1485284250108463, + "learning_rate": 1.9740754754269617e-06, + "loss": 0.5556, + "step": 10075 + }, + { + "epoch": 0.4157794833704712, + "grad_norm": 6.548366257358343, + "learning_rate": 1.9738852711011644e-06, + "loss": 0.5592, + "step": 10076 + }, + { + "epoch": 0.4158207477098292, + "grad_norm": 4.217199828750218, + "learning_rate": 1.97369505831059e-06, + "loss": 0.5899, + "step": 10077 + }, + { + "epoch": 0.4158620120491871, + "grad_norm": 20.58434819583355, + "learning_rate": 1.9735048370586375e-06, + "loss": 0.4896, + "step": 10078 + }, + { + "epoch": 0.415903276388545, + "grad_norm": 3.07757542703752, + "learning_rate": 1.9733146073487044e-06, + "loss": 0.601, + "step": 10079 + }, + { + "epoch": 0.41594454072790293, + "grad_norm": 3.8422145574449362, + "learning_rate": 1.9731243691841873e-06, + "loss": 0.557, + "step": 10080 + }, + { + "epoch": 0.41598580506726085, + "grad_norm": 3.3315489169360486, + "learning_rate": 1.972934122568486e-06, + "loss": 0.4727, + "step": 10081 + }, + { + "epoch": 0.4160270694066188, + "grad_norm": 2.4382828848844142, + "learning_rate": 1.972743867504998e-06, + "loss": 0.4814, + "step": 10082 + }, + { + "epoch": 0.41606833374597674, + "grad_norm": 3.563710056276121, + "learning_rate": 1.972553603997122e-06, + "loss": 0.4916, + "step": 10083 + }, + { + "epoch": 0.41610959808533465, + "grad_norm": 2.3980481699678564, + "learning_rate": 1.9723633320482562e-06, + "loss": 0.5012, + "step": 10084 + }, + { + "epoch": 0.41615086242469257, + "grad_norm": 11.012833988536045, + "learning_rate": 1.9721730516618e-06, + "loss": 0.5119, + "step": 10085 + }, + { + "epoch": 0.4161921267640505, + "grad_norm": 6.054566679884301, + "learning_rate": 1.9719827628411523e-06, + "loss": 0.5241, + "step": 10086 + }, + { + "epoch": 0.41623339110340846, + "grad_norm": 3.0019049336776273, + "learning_rate": 1.971792465589711e-06, + "loss": 0.5749, + "step": 10087 + }, + { + "epoch": 0.4162746554427664, + "grad_norm": 2.6373929237867744, + "learning_rate": 1.971602159910876e-06, + "loss": 0.5475, + "step": 10088 + }, + { + "epoch": 0.4163159197821243, + "grad_norm": 3.271473627383752, + "learning_rate": 1.9714118458080474e-06, + "loss": 0.5454, + "step": 10089 + }, + { + "epoch": 0.4163571841214822, + "grad_norm": 8.238197153084451, + "learning_rate": 1.971221523284623e-06, + "loss": 0.5411, + "step": 10090 + }, + { + "epoch": 0.4163984484608401, + "grad_norm": 3.0105756099625913, + "learning_rate": 1.9710311923440037e-06, + "loss": 0.5384, + "step": 10091 + }, + { + "epoch": 0.4164397128001981, + "grad_norm": 2.093192922929646, + "learning_rate": 1.9708408529895892e-06, + "loss": 0.5034, + "step": 10092 + }, + { + "epoch": 0.416480977139556, + "grad_norm": 3.9429706226856207, + "learning_rate": 1.970650505224779e-06, + "loss": 0.4703, + "step": 10093 + }, + { + "epoch": 0.4165222414789139, + "grad_norm": 6.235968324315422, + "learning_rate": 1.9704601490529736e-06, + "loss": 0.52, + "step": 10094 + }, + { + "epoch": 0.41656350581827184, + "grad_norm": 4.25635527837255, + 
"learning_rate": 1.970269784477573e-06, + "loss": 0.5329, + "step": 10095 + }, + { + "epoch": 0.41660477015762976, + "grad_norm": 2.985506490081808, + "learning_rate": 1.9700794115019775e-06, + "loss": 0.5214, + "step": 10096 + }, + { + "epoch": 0.4166460344969877, + "grad_norm": 3.1926100368506956, + "learning_rate": 1.9698890301295878e-06, + "loss": 0.5414, + "step": 10097 + }, + { + "epoch": 0.41668729883634564, + "grad_norm": 12.988468224381926, + "learning_rate": 1.969698640363804e-06, + "loss": 0.5585, + "step": 10098 + }, + { + "epoch": 0.41672856317570356, + "grad_norm": 4.649241006295882, + "learning_rate": 1.9695082422080287e-06, + "loss": 0.5375, + "step": 10099 + }, + { + "epoch": 0.4167698275150615, + "grad_norm": 4.597742894335, + "learning_rate": 1.9693178356656607e-06, + "loss": 0.5463, + "step": 10100 + }, + { + "epoch": 0.4168110918544194, + "grad_norm": 3.7209361520964728, + "learning_rate": 1.9691274207401025e-06, + "loss": 0.5253, + "step": 10101 + }, + { + "epoch": 0.41685235619377736, + "grad_norm": 14.675821182362471, + "learning_rate": 1.9689369974347547e-06, + "loss": 0.522, + "step": 10102 + }, + { + "epoch": 0.4168936205331353, + "grad_norm": 3.4356012509190914, + "learning_rate": 1.968746565753019e-06, + "loss": 0.5035, + "step": 10103 + }, + { + "epoch": 0.4169348848724932, + "grad_norm": 2.9512255639352145, + "learning_rate": 1.9685561256982974e-06, + "loss": 0.4988, + "step": 10104 + }, + { + "epoch": 0.4169761492118511, + "grad_norm": 2.750573695433253, + "learning_rate": 1.9683656772739914e-06, + "loss": 0.4797, + "step": 10105 + }, + { + "epoch": 0.417017413551209, + "grad_norm": 2.5592137736955203, + "learning_rate": 1.968175220483502e-06, + "loss": 0.5401, + "step": 10106 + }, + { + "epoch": 0.417058677890567, + "grad_norm": 2.389627242772832, + "learning_rate": 1.9679847553302326e-06, + "loss": 0.4587, + "step": 10107 + }, + { + "epoch": 0.4170999422299249, + "grad_norm": 4.284600703395214, + "learning_rate": 1.967794281817584e-06, + "loss": 0.4922, + "step": 10108 + }, + { + "epoch": 0.41714120656928283, + "grad_norm": 2.556260370934978, + "learning_rate": 1.96760379994896e-06, + "loss": 0.5078, + "step": 10109 + }, + { + "epoch": 0.41718247090864075, + "grad_norm": 2.964762221061286, + "learning_rate": 1.9674133097277617e-06, + "loss": 0.5277, + "step": 10110 + }, + { + "epoch": 0.41722373524799866, + "grad_norm": 4.380104716467218, + "learning_rate": 1.9672228111573934e-06, + "loss": 0.5279, + "step": 10111 + }, + { + "epoch": 0.41726499958735663, + "grad_norm": 2.3827316422650995, + "learning_rate": 1.9670323042412564e-06, + "loss": 0.5289, + "step": 10112 + }, + { + "epoch": 0.41730626392671455, + "grad_norm": 9.081229702333552, + "learning_rate": 1.9668417889827534e-06, + "loss": 0.5719, + "step": 10113 + }, + { + "epoch": 0.41734752826607247, + "grad_norm": 2.3469807604704305, + "learning_rate": 1.9666512653852893e-06, + "loss": 0.5946, + "step": 10114 + }, + { + "epoch": 0.4173887926054304, + "grad_norm": 10.039628981655424, + "learning_rate": 1.9664607334522655e-06, + "loss": 0.5566, + "step": 10115 + }, + { + "epoch": 0.4174300569447883, + "grad_norm": 5.532949142750004, + "learning_rate": 1.9662701931870866e-06, + "loss": 0.5089, + "step": 10116 + }, + { + "epoch": 0.41747132128414627, + "grad_norm": 8.744239596999789, + "learning_rate": 1.966079644593155e-06, + "loss": 0.5619, + "step": 10117 + }, + { + "epoch": 0.4175125856235042, + "grad_norm": 2.67581465754839, + "learning_rate": 1.9658890876738757e-06, + "loss": 0.525, + "step": 10118 + 
}, + { + "epoch": 0.4175538499628621, + "grad_norm": 4.1780825210680055, + "learning_rate": 1.9656985224326513e-06, + "loss": 0.5305, + "step": 10119 + }, + { + "epoch": 0.41759511430222, + "grad_norm": 6.417924839636297, + "learning_rate": 1.9655079488728866e-06, + "loss": 0.5658, + "step": 10120 + }, + { + "epoch": 0.41763637864157793, + "grad_norm": 2.822639203274466, + "learning_rate": 1.965317366997985e-06, + "loss": 0.4862, + "step": 10121 + }, + { + "epoch": 0.41767764298093585, + "grad_norm": 6.0472115732592515, + "learning_rate": 1.965126776811352e-06, + "loss": 0.4871, + "step": 10122 + }, + { + "epoch": 0.4177189073202938, + "grad_norm": 2.537445360288912, + "learning_rate": 1.96493617831639e-06, + "loss": 0.5589, + "step": 10123 + }, + { + "epoch": 0.41776017165965174, + "grad_norm": 7.2073217419854005, + "learning_rate": 1.9647455715165053e-06, + "loss": 0.5892, + "step": 10124 + }, + { + "epoch": 0.41780143599900965, + "grad_norm": 4.328230848058311, + "learning_rate": 1.9645549564151023e-06, + "loss": 0.4791, + "step": 10125 + }, + { + "epoch": 0.41784270033836757, + "grad_norm": 5.122335557144441, + "learning_rate": 1.9643643330155857e-06, + "loss": 0.4908, + "step": 10126 + }, + { + "epoch": 0.4178839646777255, + "grad_norm": 2.5151677074243635, + "learning_rate": 1.96417370132136e-06, + "loss": 0.6047, + "step": 10127 + }, + { + "epoch": 0.41792522901708345, + "grad_norm": 4.180800833127166, + "learning_rate": 1.9639830613358312e-06, + "loss": 0.5387, + "step": 10128 + }, + { + "epoch": 0.41796649335644137, + "grad_norm": 4.140035499508714, + "learning_rate": 1.963792413062404e-06, + "loss": 0.5322, + "step": 10129 + }, + { + "epoch": 0.4180077576957993, + "grad_norm": 3.0485888237341503, + "learning_rate": 1.963601756504484e-06, + "loss": 0.5138, + "step": 10130 + }, + { + "epoch": 0.4180490220351572, + "grad_norm": 5.010696309593974, + "learning_rate": 1.963411091665477e-06, + "loss": 0.4961, + "step": 10131 + }, + { + "epoch": 0.4180902863745151, + "grad_norm": 3.3271487225625376, + "learning_rate": 1.963220418548789e-06, + "loss": 0.537, + "step": 10132 + }, + { + "epoch": 0.4181315507138731, + "grad_norm": 4.307672562511857, + "learning_rate": 1.9630297371578254e-06, + "loss": 0.5265, + "step": 10133 + }, + { + "epoch": 0.418172815053231, + "grad_norm": 6.929360593243165, + "learning_rate": 1.962839047495992e-06, + "loss": 0.5282, + "step": 10134 + }, + { + "epoch": 0.4182140793925889, + "grad_norm": 4.539040891965409, + "learning_rate": 1.9626483495666955e-06, + "loss": 0.5296, + "step": 10135 + }, + { + "epoch": 0.41825534373194684, + "grad_norm": 18.04928213236187, + "learning_rate": 1.9624576433733425e-06, + "loss": 0.6016, + "step": 10136 + }, + { + "epoch": 0.41829660807130475, + "grad_norm": 6.036917957086874, + "learning_rate": 1.962266928919338e-06, + "loss": 0.5269, + "step": 10137 + }, + { + "epoch": 0.4183378724106627, + "grad_norm": 10.21543024365146, + "learning_rate": 1.962076206208091e-06, + "loss": 0.5323, + "step": 10138 + }, + { + "epoch": 0.41837913675002064, + "grad_norm": 2.685384311177499, + "learning_rate": 1.9618854752430063e-06, + "loss": 0.5668, + "step": 10139 + }, + { + "epoch": 0.41842040108937856, + "grad_norm": 3.5769645101523233, + "learning_rate": 1.961694736027492e-06, + "loss": 0.4783, + "step": 10140 + }, + { + "epoch": 0.4184616654287365, + "grad_norm": 5.4227323362626585, + "learning_rate": 1.9615039885649543e-06, + "loss": 0.5688, + "step": 10141 + }, + { + "epoch": 0.4185029297680944, + "grad_norm": 14.498922156659297, + 
"learning_rate": 1.961313232858801e-06, + "loss": 0.5392, + "step": 10142 + }, + { + "epoch": 0.41854419410745236, + "grad_norm": 2.1986712041761347, + "learning_rate": 1.961122468912439e-06, + "loss": 0.547, + "step": 10143 + }, + { + "epoch": 0.4185854584468103, + "grad_norm": 4.25028351290122, + "learning_rate": 1.9609316967292767e-06, + "loss": 0.5627, + "step": 10144 + }, + { + "epoch": 0.4186267227861682, + "grad_norm": 1.9068526973899445, + "learning_rate": 1.960740916312721e-06, + "loss": 0.5915, + "step": 10145 + }, + { + "epoch": 0.4186679871255261, + "grad_norm": 2.782201312980662, + "learning_rate": 1.9605501276661803e-06, + "loss": 0.5534, + "step": 10146 + }, + { + "epoch": 0.418709251464884, + "grad_norm": 7.338937358841732, + "learning_rate": 1.9603593307930617e-06, + "loss": 0.4903, + "step": 10147 + }, + { + "epoch": 0.418750515804242, + "grad_norm": 4.497614818816979, + "learning_rate": 1.960168525696774e-06, + "loss": 0.4991, + "step": 10148 + }, + { + "epoch": 0.4187917801435999, + "grad_norm": 13.504615033959961, + "learning_rate": 1.959977712380725e-06, + "loss": 0.5451, + "step": 10149 + }, + { + "epoch": 0.41883304448295783, + "grad_norm": 2.756551873443639, + "learning_rate": 1.9597868908483238e-06, + "loss": 0.5636, + "step": 10150 + }, + { + "epoch": 0.41887430882231574, + "grad_norm": 17.369460030755278, + "learning_rate": 1.9595960611029783e-06, + "loss": 0.5597, + "step": 10151 + }, + { + "epoch": 0.41891557316167366, + "grad_norm": 3.1633081570240233, + "learning_rate": 1.9594052231480976e-06, + "loss": 0.5154, + "step": 10152 + }, + { + "epoch": 0.41895683750103163, + "grad_norm": 2.635434330053725, + "learning_rate": 1.95921437698709e-06, + "loss": 0.5541, + "step": 10153 + }, + { + "epoch": 0.41899810184038955, + "grad_norm": 3.1683324464190137, + "learning_rate": 1.959023522623365e-06, + "loss": 0.5201, + "step": 10154 + }, + { + "epoch": 0.41903936617974746, + "grad_norm": 2.7779347681655184, + "learning_rate": 1.9588326600603317e-06, + "loss": 0.5137, + "step": 10155 + }, + { + "epoch": 0.4190806305191054, + "grad_norm": 6.1729595167999305, + "learning_rate": 1.9586417893013994e-06, + "loss": 0.5186, + "step": 10156 + }, + { + "epoch": 0.4191218948584633, + "grad_norm": 2.5333982480875252, + "learning_rate": 1.958450910349977e-06, + "loss": 0.5366, + "step": 10157 + }, + { + "epoch": 0.41916315919782127, + "grad_norm": 3.6316666066547256, + "learning_rate": 1.958260023209475e-06, + "loss": 0.5455, + "step": 10158 + }, + { + "epoch": 0.4192044235371792, + "grad_norm": 3.6059278059259823, + "learning_rate": 1.9580691278833023e-06, + "loss": 0.5747, + "step": 10159 + }, + { + "epoch": 0.4192456878765371, + "grad_norm": 3.0662449716136475, + "learning_rate": 1.9578782243748685e-06, + "loss": 0.5177, + "step": 10160 + }, + { + "epoch": 0.419286952215895, + "grad_norm": 2.5984568385854705, + "learning_rate": 1.957687312687585e-06, + "loss": 0.5051, + "step": 10161 + }, + { + "epoch": 0.41932821655525293, + "grad_norm": 5.616652014311639, + "learning_rate": 1.9574963928248605e-06, + "loss": 0.5595, + "step": 10162 + }, + { + "epoch": 0.4193694808946109, + "grad_norm": 4.177785379757222, + "learning_rate": 1.9573054647901066e-06, + "loss": 0.5355, + "step": 10163 + }, + { + "epoch": 0.4194107452339688, + "grad_norm": 3.019923006438009, + "learning_rate": 1.957114528586733e-06, + "loss": 0.4657, + "step": 10164 + }, + { + "epoch": 0.41945200957332673, + "grad_norm": 3.075736227731516, + "learning_rate": 1.9569235842181502e-06, + "loss": 0.5297, + "step": 10165 
+ }, + { + "epoch": 0.41949327391268465, + "grad_norm": 6.8970121968567115, + "learning_rate": 1.956732631687769e-06, + "loss": 0.5579, + "step": 10166 + }, + { + "epoch": 0.41953453825204257, + "grad_norm": 2.312868173411269, + "learning_rate": 1.9565416709990004e-06, + "loss": 0.5352, + "step": 10167 + }, + { + "epoch": 0.41957580259140054, + "grad_norm": 11.647712044386806, + "learning_rate": 1.9563507021552558e-06, + "loss": 0.5682, + "step": 10168 + }, + { + "epoch": 0.41961706693075845, + "grad_norm": 3.544160929796463, + "learning_rate": 1.956159725159946e-06, + "loss": 0.5741, + "step": 10169 + }, + { + "epoch": 0.41965833127011637, + "grad_norm": 3.6488452684720785, + "learning_rate": 1.9559687400164817e-06, + "loss": 0.5567, + "step": 10170 + }, + { + "epoch": 0.4196995956094743, + "grad_norm": 11.405705455965213, + "learning_rate": 1.9557777467282763e-06, + "loss": 0.5613, + "step": 10171 + }, + { + "epoch": 0.4197408599488322, + "grad_norm": 3.553692003949125, + "learning_rate": 1.955586745298739e-06, + "loss": 0.5177, + "step": 10172 + }, + { + "epoch": 0.4197821242881902, + "grad_norm": 4.131738326209539, + "learning_rate": 1.955395735731283e-06, + "loss": 0.5338, + "step": 10173 + }, + { + "epoch": 0.4198233886275481, + "grad_norm": 5.379693375557407, + "learning_rate": 1.95520471802932e-06, + "loss": 0.5197, + "step": 10174 + }, + { + "epoch": 0.419864652966906, + "grad_norm": 3.27181363295763, + "learning_rate": 1.9550136921962622e-06, + "loss": 0.6383, + "step": 10175 + }, + { + "epoch": 0.4199059173062639, + "grad_norm": 6.655337111349899, + "learning_rate": 1.9548226582355215e-06, + "loss": 0.5172, + "step": 10176 + }, + { + "epoch": 0.41994718164562184, + "grad_norm": 2.87080807850846, + "learning_rate": 1.9546316161505104e-06, + "loss": 0.4966, + "step": 10177 + }, + { + "epoch": 0.4199884459849798, + "grad_norm": 2.9629959304334363, + "learning_rate": 1.954440565944641e-06, + "loss": 0.5177, + "step": 10178 + }, + { + "epoch": 0.4200297103243377, + "grad_norm": 6.439104779426828, + "learning_rate": 1.954249507621327e-06, + "loss": 0.6372, + "step": 10179 + }, + { + "epoch": 0.42007097466369564, + "grad_norm": 3.327678685368769, + "learning_rate": 1.9540584411839793e-06, + "loss": 0.5619, + "step": 10180 + }, + { + "epoch": 0.42011223900305356, + "grad_norm": 7.782479155233177, + "learning_rate": 1.953867366636013e-06, + "loss": 0.5161, + "step": 10181 + }, + { + "epoch": 0.42015350334241147, + "grad_norm": 2.96704533315418, + "learning_rate": 1.9536762839808397e-06, + "loss": 0.549, + "step": 10182 + }, + { + "epoch": 0.4201947676817694, + "grad_norm": 2.6544724754533173, + "learning_rate": 1.953485193221873e-06, + "loss": 0.5205, + "step": 10183 + }, + { + "epoch": 0.42023603202112736, + "grad_norm": 3.814522722464707, + "learning_rate": 1.9532940943625263e-06, + "loss": 0.5124, + "step": 10184 + }, + { + "epoch": 0.4202772963604853, + "grad_norm": 3.6939434824041424, + "learning_rate": 1.9531029874062136e-06, + "loss": 0.5976, + "step": 10185 + }, + { + "epoch": 0.4203185606998432, + "grad_norm": 4.2038196724028705, + "learning_rate": 1.952911872356347e-06, + "loss": 0.5645, + "step": 10186 + }, + { + "epoch": 0.4203598250392011, + "grad_norm": 12.15806304599833, + "learning_rate": 1.952720749216342e-06, + "loss": 0.4982, + "step": 10187 + }, + { + "epoch": 0.420401089378559, + "grad_norm": 4.187352773916084, + "learning_rate": 1.952529617989612e-06, + "loss": 0.5285, + "step": 10188 + }, + { + "epoch": 0.420442353717917, + "grad_norm": 3.0004265684271108, + 
"learning_rate": 1.9523384786795707e-06, + "loss": 0.5362, + "step": 10189 + }, + { + "epoch": 0.4204836180572749, + "grad_norm": 4.5136829484502075, + "learning_rate": 1.9521473312896325e-06, + "loss": 0.4509, + "step": 10190 + }, + { + "epoch": 0.4205248823966328, + "grad_norm": 3.267559190573318, + "learning_rate": 1.9519561758232123e-06, + "loss": 0.5129, + "step": 10191 + }, + { + "epoch": 0.42056614673599074, + "grad_norm": 3.0551426231044294, + "learning_rate": 1.951765012283724e-06, + "loss": 0.491, + "step": 10192 + }, + { + "epoch": 0.42060741107534866, + "grad_norm": 8.140934696513426, + "learning_rate": 1.9515738406745826e-06, + "loss": 0.5682, + "step": 10193 + }, + { + "epoch": 0.42064867541470663, + "grad_norm": 3.010700455865939, + "learning_rate": 1.9513826609992027e-06, + "loss": 0.5092, + "step": 10194 + }, + { + "epoch": 0.42068993975406455, + "grad_norm": 11.408681112146331, + "learning_rate": 1.9511914732609987e-06, + "loss": 0.5045, + "step": 10195 + }, + { + "epoch": 0.42073120409342246, + "grad_norm": 2.9431040526094217, + "learning_rate": 1.9510002774633864e-06, + "loss": 0.5791, + "step": 10196 + }, + { + "epoch": 0.4207724684327804, + "grad_norm": 5.600490037141889, + "learning_rate": 1.9508090736097814e-06, + "loss": 0.5226, + "step": 10197 + }, + { + "epoch": 0.4208137327721383, + "grad_norm": 3.439370521219811, + "learning_rate": 1.9506178617035984e-06, + "loss": 0.543, + "step": 10198 + }, + { + "epoch": 0.42085499711149627, + "grad_norm": 13.009669224861417, + "learning_rate": 1.950426641748253e-06, + "loss": 0.5312, + "step": 10199 + }, + { + "epoch": 0.4208962614508542, + "grad_norm": 2.5545198362953556, + "learning_rate": 1.9502354137471614e-06, + "loss": 0.5689, + "step": 10200 + }, + { + "epoch": 0.4209375257902121, + "grad_norm": 4.889168575597074, + "learning_rate": 1.9500441777037387e-06, + "loss": 0.4985, + "step": 10201 + }, + { + "epoch": 0.42097879012957, + "grad_norm": 6.312760512875262, + "learning_rate": 1.949852933621401e-06, + "loss": 0.5267, + "step": 10202 + }, + { + "epoch": 0.42102005446892793, + "grad_norm": 3.8209617408762084, + "learning_rate": 1.9496616815035644e-06, + "loss": 0.512, + "step": 10203 + }, + { + "epoch": 0.4210613188082859, + "grad_norm": 3.1825005865738416, + "learning_rate": 1.9494704213536463e-06, + "loss": 0.5459, + "step": 10204 + }, + { + "epoch": 0.4211025831476438, + "grad_norm": 2.3058176074445083, + "learning_rate": 1.949279153175061e-06, + "loss": 0.55, + "step": 10205 + }, + { + "epoch": 0.42114384748700173, + "grad_norm": 2.652677590227173, + "learning_rate": 1.9490878769712268e-06, + "loss": 0.551, + "step": 10206 + }, + { + "epoch": 0.42118511182635965, + "grad_norm": 18.627309171981473, + "learning_rate": 1.948896592745559e-06, + "loss": 0.4908, + "step": 10207 + }, + { + "epoch": 0.42122637616571756, + "grad_norm": 3.4653940664405734, + "learning_rate": 1.9487053005014755e-06, + "loss": 0.5558, + "step": 10208 + }, + { + "epoch": 0.42126764050507554, + "grad_norm": 7.402969095731028, + "learning_rate": 1.9485140002423925e-06, + "loss": 0.5209, + "step": 10209 + }, + { + "epoch": 0.42130890484443345, + "grad_norm": 4.004569816553106, + "learning_rate": 1.9483226919717284e-06, + "loss": 0.5371, + "step": 10210 + }, + { + "epoch": 0.42135016918379137, + "grad_norm": 8.514076390640623, + "learning_rate": 1.9481313756928984e-06, + "loss": 0.458, + "step": 10211 + }, + { + "epoch": 0.4213914335231493, + "grad_norm": 4.07956472961413, + "learning_rate": 1.9479400514093216e-06, + "loss": 0.5103, + "step": 
10212 + }, + { + "epoch": 0.4214326978625072, + "grad_norm": 4.313853271787308, + "learning_rate": 1.9477487191244145e-06, + "loss": 0.4892, + "step": 10213 + }, + { + "epoch": 0.42147396220186517, + "grad_norm": 2.775172892608962, + "learning_rate": 1.9475573788415954e-06, + "loss": 0.5587, + "step": 10214 + }, + { + "epoch": 0.4215152265412231, + "grad_norm": 6.885585860266667, + "learning_rate": 1.947366030564282e-06, + "loss": 0.5807, + "step": 10215 + }, + { + "epoch": 0.421556490880581, + "grad_norm": 3.2372961814900503, + "learning_rate": 1.9471746742958915e-06, + "loss": 0.4849, + "step": 10216 + }, + { + "epoch": 0.4215977552199389, + "grad_norm": 4.987675946169307, + "learning_rate": 1.9469833100398436e-06, + "loss": 0.5777, + "step": 10217 + }, + { + "epoch": 0.42163901955929683, + "grad_norm": 3.077677378134516, + "learning_rate": 1.946791937799555e-06, + "loss": 0.5135, + "step": 10218 + }, + { + "epoch": 0.4216802838986548, + "grad_norm": 4.845461434940468, + "learning_rate": 1.946600557578445e-06, + "loss": 0.501, + "step": 10219 + }, + { + "epoch": 0.4217215482380127, + "grad_norm": 2.6017535797236855, + "learning_rate": 1.9464091693799314e-06, + "loss": 0.5481, + "step": 10220 + }, + { + "epoch": 0.42176281257737064, + "grad_norm": 3.4263589103945673, + "learning_rate": 1.946217773207434e-06, + "loss": 0.5459, + "step": 10221 + }, + { + "epoch": 0.42180407691672855, + "grad_norm": 3.581264462010732, + "learning_rate": 1.94602636906437e-06, + "loss": 0.5036, + "step": 10222 + }, + { + "epoch": 0.42184534125608647, + "grad_norm": 2.3435136524369757, + "learning_rate": 1.9458349569541596e-06, + "loss": 0.5516, + "step": 10223 + }, + { + "epoch": 0.42188660559544444, + "grad_norm": 3.1679840430216526, + "learning_rate": 1.9456435368802223e-06, + "loss": 0.5368, + "step": 10224 + }, + { + "epoch": 0.42192786993480236, + "grad_norm": 8.712502507532921, + "learning_rate": 1.9454521088459756e-06, + "loss": 0.5082, + "step": 10225 + }, + { + "epoch": 0.4219691342741603, + "grad_norm": 2.2856989765693374, + "learning_rate": 1.9452606728548404e-06, + "loss": 0.5213, + "step": 10226 + }, + { + "epoch": 0.4220103986135182, + "grad_norm": 3.587592001280312, + "learning_rate": 1.9450692289102356e-06, + "loss": 0.5347, + "step": 10227 + }, + { + "epoch": 0.4220516629528761, + "grad_norm": 2.4597551469068533, + "learning_rate": 1.944877777015581e-06, + "loss": 0.4877, + "step": 10228 + }, + { + "epoch": 0.4220929272922341, + "grad_norm": 4.176892187417533, + "learning_rate": 1.944686317174296e-06, + "loss": 0.5534, + "step": 10229 + }, + { + "epoch": 0.422134191631592, + "grad_norm": 3.7151108906283947, + "learning_rate": 1.944494849389801e-06, + "loss": 0.55, + "step": 10230 + }, + { + "epoch": 0.4221754559709499, + "grad_norm": 2.7330302675134264, + "learning_rate": 1.944303373665517e-06, + "loss": 0.4819, + "step": 10231 + }, + { + "epoch": 0.4222167203103078, + "grad_norm": 4.628367485296768, + "learning_rate": 1.9441118900048623e-06, + "loss": 0.5713, + "step": 10232 + }, + { + "epoch": 0.42225798464966574, + "grad_norm": 6.882112721253505, + "learning_rate": 1.9439203984112584e-06, + "loss": 0.5338, + "step": 10233 + }, + { + "epoch": 0.4222992489890237, + "grad_norm": 4.2128933141910005, + "learning_rate": 1.9437288988881265e-06, + "loss": 0.5818, + "step": 10234 + }, + { + "epoch": 0.42234051332838163, + "grad_norm": 5.284515333108198, + "learning_rate": 1.9435373914388854e-06, + "loss": 0.5688, + "step": 10235 + }, + { + "epoch": 0.42238177766773954, + "grad_norm": 
2.8298506000310804, + "learning_rate": 1.943345876066957e-06, + "loss": 0.5585, + "step": 10236 + }, + { + "epoch": 0.42242304200709746, + "grad_norm": 3.113069967430091, + "learning_rate": 1.9431543527757626e-06, + "loss": 0.4907, + "step": 10237 + }, + { + "epoch": 0.4224643063464554, + "grad_norm": 2.6937489240794177, + "learning_rate": 1.9429628215687225e-06, + "loss": 0.5487, + "step": 10238 + }, + { + "epoch": 0.42250557068581335, + "grad_norm": 4.214138495134151, + "learning_rate": 1.9427712824492587e-06, + "loss": 0.5507, + "step": 10239 + }, + { + "epoch": 0.42254683502517126, + "grad_norm": 3.01131824330084, + "learning_rate": 1.942579735420792e-06, + "loss": 0.5566, + "step": 10240 + }, + { + "epoch": 0.4225880993645292, + "grad_norm": 6.463111231562496, + "learning_rate": 1.942388180486744e-06, + "loss": 0.4704, + "step": 10241 + }, + { + "epoch": 0.4226293637038871, + "grad_norm": 2.9936675808525433, + "learning_rate": 1.9421966176505363e-06, + "loss": 0.5182, + "step": 10242 + }, + { + "epoch": 0.422670628043245, + "grad_norm": 2.8086770153734735, + "learning_rate": 1.942005046915591e-06, + "loss": 0.5507, + "step": 10243 + }, + { + "epoch": 0.4227118923826029, + "grad_norm": 3.36517277553269, + "learning_rate": 1.9418134682853297e-06, + "loss": 0.5402, + "step": 10244 + }, + { + "epoch": 0.4227531567219609, + "grad_norm": 3.225741430117561, + "learning_rate": 1.941621881763175e-06, + "loss": 0.547, + "step": 10245 + }, + { + "epoch": 0.4227944210613188, + "grad_norm": 2.6175752532312297, + "learning_rate": 1.941430287352548e-06, + "loss": 0.5785, + "step": 10246 + }, + { + "epoch": 0.42283568540067673, + "grad_norm": 308.29516150518015, + "learning_rate": 1.941238685056872e-06, + "loss": 0.5569, + "step": 10247 + }, + { + "epoch": 0.42287694974003465, + "grad_norm": 2.492142513555084, + "learning_rate": 1.941047074879569e-06, + "loss": 0.5397, + "step": 10248 + }, + { + "epoch": 0.42291821407939256, + "grad_norm": 4.42941343953659, + "learning_rate": 1.940855456824063e-06, + "loss": 0.5454, + "step": 10249 + }, + { + "epoch": 0.42295947841875053, + "grad_norm": 2.8239606435534954, + "learning_rate": 1.940663830893775e-06, + "loss": 0.5391, + "step": 10250 + }, + { + "epoch": 0.42300074275810845, + "grad_norm": 2.8927775933182267, + "learning_rate": 1.940472197092129e-06, + "loss": 0.4833, + "step": 10251 + }, + { + "epoch": 0.42304200709746637, + "grad_norm": 3.6671695514363, + "learning_rate": 1.940280555422547e-06, + "loss": 0.5024, + "step": 10252 + }, + { + "epoch": 0.4230832714368243, + "grad_norm": 2.3212624326084232, + "learning_rate": 1.940088905888453e-06, + "loss": 0.5462, + "step": 10253 + }, + { + "epoch": 0.4231245357761822, + "grad_norm": 4.31410967876102, + "learning_rate": 1.9398972484932705e-06, + "loss": 0.5075, + "step": 10254 + }, + { + "epoch": 0.42316580011554017, + "grad_norm": 3.233005311472926, + "learning_rate": 1.9397055832404225e-06, + "loss": 0.602, + "step": 10255 + }, + { + "epoch": 0.4232070644548981, + "grad_norm": 2.9027880026191544, + "learning_rate": 1.939513910133333e-06, + "loss": 0.5056, + "step": 10256 + }, + { + "epoch": 0.423248328794256, + "grad_norm": 2.3183159739810004, + "learning_rate": 1.9393222291754255e-06, + "loss": 0.4867, + "step": 10257 + }, + { + "epoch": 0.4232895931336139, + "grad_norm": 3.8227281894578535, + "learning_rate": 1.9391305403701238e-06, + "loss": 0.5495, + "step": 10258 + }, + { + "epoch": 0.42333085747297183, + "grad_norm": 2.4144517378897126, + "learning_rate": 1.9389388437208523e-06, + "loss": 0.4933, + 
"step": 10259 + }, + { + "epoch": 0.4233721218123298, + "grad_norm": 3.3416065417315806, + "learning_rate": 1.9387471392310348e-06, + "loss": 0.5629, + "step": 10260 + }, + { + "epoch": 0.4234133861516877, + "grad_norm": 2.2607052538501184, + "learning_rate": 1.9385554269040957e-06, + "loss": 0.5875, + "step": 10261 + }, + { + "epoch": 0.42345465049104564, + "grad_norm": 5.373029324518916, + "learning_rate": 1.9383637067434597e-06, + "loss": 0.4923, + "step": 10262 + }, + { + "epoch": 0.42349591483040355, + "grad_norm": 4.741145774829837, + "learning_rate": 1.938171978752552e-06, + "loss": 0.5122, + "step": 10263 + }, + { + "epoch": 0.42353717916976147, + "grad_norm": 2.494282830276885, + "learning_rate": 1.9379802429347957e-06, + "loss": 0.471, + "step": 10264 + }, + { + "epoch": 0.42357844350911944, + "grad_norm": 2.3483466749898123, + "learning_rate": 1.9377884992936174e-06, + "loss": 0.4714, + "step": 10265 + }, + { + "epoch": 0.42361970784847736, + "grad_norm": 2.2751394055428085, + "learning_rate": 1.9375967478324403e-06, + "loss": 0.421, + "step": 10266 + }, + { + "epoch": 0.42366097218783527, + "grad_norm": 6.118513322546039, + "learning_rate": 1.937404988554691e-06, + "loss": 0.5333, + "step": 10267 + }, + { + "epoch": 0.4237022365271932, + "grad_norm": 2.502279343704895, + "learning_rate": 1.937213221463795e-06, + "loss": 0.5794, + "step": 10268 + }, + { + "epoch": 0.4237435008665511, + "grad_norm": 2.010965699576202, + "learning_rate": 1.9370214465631765e-06, + "loss": 0.4914, + "step": 10269 + }, + { + "epoch": 0.4237847652059091, + "grad_norm": 2.9782714898413234, + "learning_rate": 1.9368296638562617e-06, + "loss": 0.5489, + "step": 10270 + }, + { + "epoch": 0.423826029545267, + "grad_norm": 3.911624742710557, + "learning_rate": 1.9366378733464766e-06, + "loss": 0.5922, + "step": 10271 + }, + { + "epoch": 0.4238672938846249, + "grad_norm": 4.3299429793191555, + "learning_rate": 1.9364460750372463e-06, + "loss": 0.5303, + "step": 10272 + }, + { + "epoch": 0.4239085582239828, + "grad_norm": 3.7769397539572624, + "learning_rate": 1.9362542689319977e-06, + "loss": 0.5575, + "step": 10273 + }, + { + "epoch": 0.42394982256334074, + "grad_norm": 2.676658520793615, + "learning_rate": 1.9360624550341564e-06, + "loss": 0.5445, + "step": 10274 + }, + { + "epoch": 0.4239910869026987, + "grad_norm": 12.83007809700465, + "learning_rate": 1.9358706333471493e-06, + "loss": 0.5462, + "step": 10275 + }, + { + "epoch": 0.4240323512420566, + "grad_norm": 7.54696072595811, + "learning_rate": 1.9356788038744017e-06, + "loss": 0.5016, + "step": 10276 + }, + { + "epoch": 0.42407361558141454, + "grad_norm": 9.976243627239798, + "learning_rate": 1.9354869666193406e-06, + "loss": 0.513, + "step": 10277 + }, + { + "epoch": 0.42411487992077246, + "grad_norm": 8.437658100948013, + "learning_rate": 1.9352951215853933e-06, + "loss": 0.5056, + "step": 10278 + }, + { + "epoch": 0.4241561442601304, + "grad_norm": 2.7982009302403066, + "learning_rate": 1.935103268775986e-06, + "loss": 0.5395, + "step": 10279 + }, + { + "epoch": 0.42419740859948835, + "grad_norm": 9.198874934264817, + "learning_rate": 1.934911408194546e-06, + "loss": 0.5273, + "step": 10280 + }, + { + "epoch": 0.42423867293884626, + "grad_norm": 3.3744275789678664, + "learning_rate": 1.9347195398445e-06, + "loss": 0.5392, + "step": 10281 + }, + { + "epoch": 0.4242799372782042, + "grad_norm": 3.7348744722159735, + "learning_rate": 1.9345276637292754e-06, + "loss": 0.5416, + "step": 10282 + }, + { + "epoch": 0.4243212016175621, + "grad_norm": 
9.670999342641021, + "learning_rate": 1.9343357798523004e-06, + "loss": 0.5455, + "step": 10283 + }, + { + "epoch": 0.42436246595692, + "grad_norm": 99.22527217542813, + "learning_rate": 1.9341438882170016e-06, + "loss": 0.606, + "step": 10284 + }, + { + "epoch": 0.424403730296278, + "grad_norm": 2.376946709632659, + "learning_rate": 1.9339519888268066e-06, + "loss": 0.4953, + "step": 10285 + }, + { + "epoch": 0.4244449946356359, + "grad_norm": 2.4833458890179476, + "learning_rate": 1.9337600816851433e-06, + "loss": 0.4956, + "step": 10286 + }, + { + "epoch": 0.4244862589749938, + "grad_norm": 4.2635305619702395, + "learning_rate": 1.93356816679544e-06, + "loss": 0.4909, + "step": 10287 + }, + { + "epoch": 0.42452752331435173, + "grad_norm": 3.844017204439797, + "learning_rate": 1.933376244161125e-06, + "loss": 0.5783, + "step": 10288 + }, + { + "epoch": 0.42456878765370965, + "grad_norm": 4.05265562674214, + "learning_rate": 1.933184313785626e-06, + "loss": 0.4516, + "step": 10289 + }, + { + "epoch": 0.4246100519930676, + "grad_norm": 3.6786597890269457, + "learning_rate": 1.9329923756723717e-06, + "loss": 0.5283, + "step": 10290 + }, + { + "epoch": 0.42465131633242553, + "grad_norm": 2.8043122488323187, + "learning_rate": 1.9328004298247896e-06, + "loss": 0.5122, + "step": 10291 + }, + { + "epoch": 0.42469258067178345, + "grad_norm": 9.197227570035297, + "learning_rate": 1.9326084762463097e-06, + "loss": 0.5477, + "step": 10292 + }, + { + "epoch": 0.42473384501114136, + "grad_norm": 3.2862794948024123, + "learning_rate": 1.9324165149403598e-06, + "loss": 0.5342, + "step": 10293 + }, + { + "epoch": 0.4247751093504993, + "grad_norm": 3.7428784196512495, + "learning_rate": 1.93222454591037e-06, + "loss": 0.563, + "step": 10294 + }, + { + "epoch": 0.42481637368985725, + "grad_norm": 2.072393102962869, + "learning_rate": 1.9320325691597675e-06, + "loss": 0.536, + "step": 10295 + }, + { + "epoch": 0.42485763802921517, + "grad_norm": 9.416662143261929, + "learning_rate": 1.9318405846919837e-06, + "loss": 0.5748, + "step": 10296 + }, + { + "epoch": 0.4248989023685731, + "grad_norm": 5.606967057017315, + "learning_rate": 1.9316485925104457e-06, + "loss": 0.6099, + "step": 10297 + }, + { + "epoch": 0.424940166707931, + "grad_norm": 4.617698958391244, + "learning_rate": 1.931456592618585e-06, + "loss": 0.4938, + "step": 10298 + }, + { + "epoch": 0.4249814310472889, + "grad_norm": 3.8967028024787376, + "learning_rate": 1.9312645850198293e-06, + "loss": 0.5855, + "step": 10299 + }, + { + "epoch": 0.4250226953866469, + "grad_norm": 5.017100548860682, + "learning_rate": 1.9310725697176096e-06, + "loss": 0.473, + "step": 10300 + }, + { + "epoch": 0.4250639597260048, + "grad_norm": 7.936884662501434, + "learning_rate": 1.9308805467153553e-06, + "loss": 0.486, + "step": 10301 + }, + { + "epoch": 0.4251052240653627, + "grad_norm": 3.5273565957666264, + "learning_rate": 1.9306885160164975e-06, + "loss": 0.5381, + "step": 10302 + }, + { + "epoch": 0.42514648840472063, + "grad_norm": 3.3645619023319138, + "learning_rate": 1.930496477624464e-06, + "loss": 0.5522, + "step": 10303 + }, + { + "epoch": 0.42518775274407855, + "grad_norm": 4.354781399711287, + "learning_rate": 1.9303044315426873e-06, + "loss": 0.5215, + "step": 10304 + }, + { + "epoch": 0.42522901708343647, + "grad_norm": 4.027559511581707, + "learning_rate": 1.9301123777745967e-06, + "loss": 0.542, + "step": 10305 + }, + { + "epoch": 0.42527028142279444, + "grad_norm": 8.34302068061005, + "learning_rate": 1.9299203163236233e-06, + "loss": 0.5654, + 
"step": 10306 + }, + { + "epoch": 0.42531154576215235, + "grad_norm": 2.7178388306309684, + "learning_rate": 1.929728247193197e-06, + "loss": 0.5465, + "step": 10307 + }, + { + "epoch": 0.42535281010151027, + "grad_norm": 3.271545375091204, + "learning_rate": 1.9295361703867498e-06, + "loss": 0.5245, + "step": 10308 + }, + { + "epoch": 0.4253940744408682, + "grad_norm": 3.848335863520373, + "learning_rate": 1.929344085907712e-06, + "loss": 0.5688, + "step": 10309 + }, + { + "epoch": 0.4254353387802261, + "grad_norm": 4.041723527703376, + "learning_rate": 1.9291519937595144e-06, + "loss": 0.5245, + "step": 10310 + }, + { + "epoch": 0.4254766031195841, + "grad_norm": 7.833777818860869, + "learning_rate": 1.9289598939455884e-06, + "loss": 0.568, + "step": 10311 + }, + { + "epoch": 0.425517867458942, + "grad_norm": 2.31091950727255, + "learning_rate": 1.9287677864693667e-06, + "loss": 0.4949, + "step": 10312 + }, + { + "epoch": 0.4255591317982999, + "grad_norm": 6.164191272337679, + "learning_rate": 1.928575671334279e-06, + "loss": 0.5067, + "step": 10313 + }, + { + "epoch": 0.4256003961376578, + "grad_norm": 3.3565380364159965, + "learning_rate": 1.9283835485437575e-06, + "loss": 0.5247, + "step": 10314 + }, + { + "epoch": 0.42564166047701574, + "grad_norm": 4.077351312670042, + "learning_rate": 1.9281914181012344e-06, + "loss": 0.5301, + "step": 10315 + }, + { + "epoch": 0.4256829248163737, + "grad_norm": 2.2814577245591297, + "learning_rate": 1.927999280010141e-06, + "loss": 0.5168, + "step": 10316 + }, + { + "epoch": 0.4257241891557316, + "grad_norm": 3.6672933592791748, + "learning_rate": 1.9278071342739104e-06, + "loss": 0.4741, + "step": 10317 + }, + { + "epoch": 0.42576545349508954, + "grad_norm": 4.527817069764431, + "learning_rate": 1.927614980895974e-06, + "loss": 0.5341, + "step": 10318 + }, + { + "epoch": 0.42580671783444746, + "grad_norm": 3.433112811689708, + "learning_rate": 1.9274228198797643e-06, + "loss": 0.5131, + "step": 10319 + }, + { + "epoch": 0.4258479821738054, + "grad_norm": 3.9845926675540233, + "learning_rate": 1.9272306512287136e-06, + "loss": 0.5879, + "step": 10320 + }, + { + "epoch": 0.42588924651316334, + "grad_norm": 7.3282588107312705, + "learning_rate": 1.9270384749462548e-06, + "loss": 0.4833, + "step": 10321 + }, + { + "epoch": 0.42593051085252126, + "grad_norm": 3.7349501488104004, + "learning_rate": 1.9268462910358204e-06, + "loss": 0.5565, + "step": 10322 + }, + { + "epoch": 0.4259717751918792, + "grad_norm": 3.5064786860402943, + "learning_rate": 1.926654099500844e-06, + "loss": 0.5523, + "step": 10323 + }, + { + "epoch": 0.4260130395312371, + "grad_norm": 4.397952245781338, + "learning_rate": 1.926461900344757e-06, + "loss": 0.5337, + "step": 10324 + }, + { + "epoch": 0.426054303870595, + "grad_norm": 3.5470631819212914, + "learning_rate": 1.926269693570995e-06, + "loss": 0.5178, + "step": 10325 + }, + { + "epoch": 0.426095568209953, + "grad_norm": 3.127780188105823, + "learning_rate": 1.926077479182989e-06, + "loss": 0.5305, + "step": 10326 + }, + { + "epoch": 0.4261368325493109, + "grad_norm": 4.463490529869998, + "learning_rate": 1.9258852571841736e-06, + "loss": 0.5204, + "step": 10327 + }, + { + "epoch": 0.4261780968886688, + "grad_norm": 10.372301510472898, + "learning_rate": 1.9256930275779817e-06, + "loss": 0.5605, + "step": 10328 + }, + { + "epoch": 0.4262193612280267, + "grad_norm": 2.6752508977829534, + "learning_rate": 1.9255007903678483e-06, + "loss": 0.5634, + "step": 10329 + }, + { + "epoch": 0.42626062556738464, + "grad_norm": 
3.663789533025965, + "learning_rate": 1.9253085455572054e-06, + "loss": 0.5304, + "step": 10330 + }, + { + "epoch": 0.4263018899067426, + "grad_norm": 7.960042317087837, + "learning_rate": 1.9251162931494883e-06, + "loss": 0.5558, + "step": 10331 + }, + { + "epoch": 0.42634315424610053, + "grad_norm": 3.1923750076405604, + "learning_rate": 1.9249240331481306e-06, + "loss": 0.5427, + "step": 10332 + }, + { + "epoch": 0.42638441858545845, + "grad_norm": 2.0661509380802947, + "learning_rate": 1.924731765556567e-06, + "loss": 0.4743, + "step": 10333 + }, + { + "epoch": 0.42642568292481636, + "grad_norm": 7.7369102657666255, + "learning_rate": 1.924539490378231e-06, + "loss": 0.5181, + "step": 10334 + }, + { + "epoch": 0.4264669472641743, + "grad_norm": 11.148400077133205, + "learning_rate": 1.9243472076165584e-06, + "loss": 0.576, + "step": 10335 + }, + { + "epoch": 0.42650821160353225, + "grad_norm": 2.4652991877575343, + "learning_rate": 1.924154917274983e-06, + "loss": 0.5397, + "step": 10336 + }, + { + "epoch": 0.42654947594289017, + "grad_norm": 2.3591181038031195, + "learning_rate": 1.9239626193569394e-06, + "loss": 0.5999, + "step": 10337 + }, + { + "epoch": 0.4265907402822481, + "grad_norm": 2.7794881341487137, + "learning_rate": 1.9237703138658624e-06, + "loss": 0.5326, + "step": 10338 + }, + { + "epoch": 0.426632004621606, + "grad_norm": 8.28340135956897, + "learning_rate": 1.923578000805188e-06, + "loss": 0.5536, + "step": 10339 + }, + { + "epoch": 0.4266732689609639, + "grad_norm": 2.5818053490629036, + "learning_rate": 1.923385680178351e-06, + "loss": 0.4551, + "step": 10340 + }, + { + "epoch": 0.4267145333003219, + "grad_norm": 3.499114163085981, + "learning_rate": 1.9231933519887866e-06, + "loss": 0.5194, + "step": 10341 + }, + { + "epoch": 0.4267557976396798, + "grad_norm": 2.9819851337224956, + "learning_rate": 1.92300101623993e-06, + "loss": 0.5953, + "step": 10342 + }, + { + "epoch": 0.4267970619790377, + "grad_norm": 3.6979118077541946, + "learning_rate": 1.9228086729352173e-06, + "loss": 0.5386, + "step": 10343 + }, + { + "epoch": 0.42683832631839563, + "grad_norm": 5.317695496646703, + "learning_rate": 1.9226163220780835e-06, + "loss": 0.5188, + "step": 10344 + }, + { + "epoch": 0.42687959065775355, + "grad_norm": 4.572309809142145, + "learning_rate": 1.9224239636719654e-06, + "loss": 0.5374, + "step": 10345 + }, + { + "epoch": 0.4269208549971115, + "grad_norm": 6.209392337389534, + "learning_rate": 1.9222315977202983e-06, + "loss": 0.5404, + "step": 10346 + }, + { + "epoch": 0.42696211933646944, + "grad_norm": 2.025078869086432, + "learning_rate": 1.922039224226519e-06, + "loss": 0.5346, + "step": 10347 + }, + { + "epoch": 0.42700338367582735, + "grad_norm": 13.587890320079978, + "learning_rate": 1.921846843194063e-06, + "loss": 0.5382, + "step": 10348 + }, + { + "epoch": 0.42704464801518527, + "grad_norm": 5.461106633594581, + "learning_rate": 1.9216544546263676e-06, + "loss": 0.5841, + "step": 10349 + }, + { + "epoch": 0.4270859123545432, + "grad_norm": 3.0078505156453073, + "learning_rate": 1.9214620585268684e-06, + "loss": 0.5258, + "step": 10350 + }, + { + "epoch": 0.42712717669390116, + "grad_norm": 3.92527214041552, + "learning_rate": 1.9212696548990026e-06, + "loss": 0.5115, + "step": 10351 + }, + { + "epoch": 0.42716844103325907, + "grad_norm": 3.3121896174903456, + "learning_rate": 1.921077243746207e-06, + "loss": 0.5613, + "step": 10352 + }, + { + "epoch": 0.427209705372617, + "grad_norm": 5.293917862511308, + "learning_rate": 1.920884825071918e-06, + "loss": 
0.5064, + "step": 10353 + }, + { + "epoch": 0.4272509697119749, + "grad_norm": 5.370913269813519, + "learning_rate": 1.9206923988795737e-06, + "loss": 0.4752, + "step": 10354 + }, + { + "epoch": 0.4272922340513328, + "grad_norm": 6.470013490526008, + "learning_rate": 1.9204999651726103e-06, + "loss": 0.5004, + "step": 10355 + }, + { + "epoch": 0.4273334983906908, + "grad_norm": 5.167941503582471, + "learning_rate": 1.920307523954466e-06, + "loss": 0.533, + "step": 10356 + }, + { + "epoch": 0.4273747627300487, + "grad_norm": 2.970850822894107, + "learning_rate": 1.9201150752285776e-06, + "loss": 0.5888, + "step": 10357 + }, + { + "epoch": 0.4274160270694066, + "grad_norm": 5.692081912073803, + "learning_rate": 1.919922618998383e-06, + "loss": 0.5126, + "step": 10358 + }, + { + "epoch": 0.42745729140876454, + "grad_norm": 5.167304410313078, + "learning_rate": 1.91973015526732e-06, + "loss": 0.5744, + "step": 10359 + }, + { + "epoch": 0.42749855574812246, + "grad_norm": 3.702158950681499, + "learning_rate": 1.9195376840388266e-06, + "loss": 0.4459, + "step": 10360 + }, + { + "epoch": 0.4275398200874804, + "grad_norm": 5.038541099167359, + "learning_rate": 1.9193452053163405e-06, + "loss": 0.559, + "step": 10361 + }, + { + "epoch": 0.42758108442683834, + "grad_norm": 5.635545818562593, + "learning_rate": 1.9191527191033002e-06, + "loss": 0.546, + "step": 10362 + }, + { + "epoch": 0.42762234876619626, + "grad_norm": 2.365301991285693, + "learning_rate": 1.918960225403143e-06, + "loss": 0.5367, + "step": 10363 + }, + { + "epoch": 0.4276636131055542, + "grad_norm": 2.7988293610954784, + "learning_rate": 1.9187677242193087e-06, + "loss": 0.5368, + "step": 10364 + }, + { + "epoch": 0.4277048774449121, + "grad_norm": 8.035431145303665, + "learning_rate": 1.918575215555235e-06, + "loss": 0.5053, + "step": 10365 + }, + { + "epoch": 0.42774614178427, + "grad_norm": 4.038376733815661, + "learning_rate": 1.918382699414361e-06, + "loss": 0.5266, + "step": 10366 + }, + { + "epoch": 0.427787406123628, + "grad_norm": 3.226298177564978, + "learning_rate": 1.9181901758001253e-06, + "loss": 0.4783, + "step": 10367 + }, + { + "epoch": 0.4278286704629859, + "grad_norm": 3.8803800944194413, + "learning_rate": 1.9179976447159673e-06, + "loss": 0.4818, + "step": 10368 + }, + { + "epoch": 0.4278699348023438, + "grad_norm": 6.144038881725554, + "learning_rate": 1.9178051061653253e-06, + "loss": 0.5361, + "step": 10369 + }, + { + "epoch": 0.4279111991417017, + "grad_norm": 7.476984731076386, + "learning_rate": 1.917612560151639e-06, + "loss": 0.6234, + "step": 10370 + }, + { + "epoch": 0.42795246348105964, + "grad_norm": 5.9306333526881865, + "learning_rate": 1.9174200066783476e-06, + "loss": 0.5089, + "step": 10371 + }, + { + "epoch": 0.4279937278204176, + "grad_norm": 3.0285323289457997, + "learning_rate": 1.917227445748891e-06, + "loss": 0.4672, + "step": 10372 + }, + { + "epoch": 0.42803499215977553, + "grad_norm": 3.0916913315914765, + "learning_rate": 1.917034877366708e-06, + "loss": 0.5422, + "step": 10373 + }, + { + "epoch": 0.42807625649913345, + "grad_norm": 5.250123384253045, + "learning_rate": 1.916842301535239e-06, + "loss": 0.5081, + "step": 10374 + }, + { + "epoch": 0.42811752083849136, + "grad_norm": 2.905924466377658, + "learning_rate": 1.916649718257924e-06, + "loss": 0.5332, + "step": 10375 + }, + { + "epoch": 0.4281587851778493, + "grad_norm": 11.655253813356696, + "learning_rate": 1.9164571275382024e-06, + "loss": 0.5641, + "step": 10376 + }, + { + "epoch": 0.42820004951720725, + "grad_norm": 
5.544564835762928, + "learning_rate": 1.9162645293795145e-06, + "loss": 0.5388, + "step": 10377 + }, + { + "epoch": 0.42824131385656516, + "grad_norm": 2.7336143861367175, + "learning_rate": 1.9160719237853013e-06, + "loss": 0.5265, + "step": 10378 + }, + { + "epoch": 0.4282825781959231, + "grad_norm": 7.80980561968432, + "learning_rate": 1.915879310759002e-06, + "loss": 0.5576, + "step": 10379 + }, + { + "epoch": 0.428323842535281, + "grad_norm": 3.7518605660470388, + "learning_rate": 1.9156866903040585e-06, + "loss": 0.5003, + "step": 10380 + }, + { + "epoch": 0.4283651068746389, + "grad_norm": 2.5949274453393554, + "learning_rate": 1.9154940624239107e-06, + "loss": 0.549, + "step": 10381 + }, + { + "epoch": 0.4284063712139969, + "grad_norm": 10.812128363701435, + "learning_rate": 1.9153014271219993e-06, + "loss": 0.4661, + "step": 10382 + }, + { + "epoch": 0.4284476355533548, + "grad_norm": 4.60455598468637, + "learning_rate": 1.9151087844017657e-06, + "loss": 0.5443, + "step": 10383 + }, + { + "epoch": 0.4284888998927127, + "grad_norm": 7.722387736423089, + "learning_rate": 1.9149161342666506e-06, + "loss": 0.5134, + "step": 10384 + }, + { + "epoch": 0.42853016423207063, + "grad_norm": 6.18349795643417, + "learning_rate": 1.9147234767200954e-06, + "loss": 0.4981, + "step": 10385 + }, + { + "epoch": 0.42857142857142855, + "grad_norm": 3.38545611575143, + "learning_rate": 1.9145308117655413e-06, + "loss": 0.5235, + "step": 10386 + }, + { + "epoch": 0.4286126929107865, + "grad_norm": 2.9714395557329905, + "learning_rate": 1.91433813940643e-06, + "loss": 0.4932, + "step": 10387 + }, + { + "epoch": 0.42865395725014444, + "grad_norm": 10.118890313686611, + "learning_rate": 1.914145459646204e-06, + "loss": 0.5674, + "step": 10388 + }, + { + "epoch": 0.42869522158950235, + "grad_norm": 3.637722779872876, + "learning_rate": 1.9139527724883026e-06, + "loss": 0.4876, + "step": 10389 + }, + { + "epoch": 0.42873648592886027, + "grad_norm": 3.6234283537662915, + "learning_rate": 1.9137600779361697e-06, + "loss": 0.5437, + "step": 10390 + }, + { + "epoch": 0.4287777502682182, + "grad_norm": 6.066248299679183, + "learning_rate": 1.9135673759932466e-06, + "loss": 0.5322, + "step": 10391 + }, + { + "epoch": 0.42881901460757615, + "grad_norm": 9.748613076540579, + "learning_rate": 1.9133746666629756e-06, + "loss": 0.5889, + "step": 10392 + }, + { + "epoch": 0.42886027894693407, + "grad_norm": 3.493010008472028, + "learning_rate": 1.913181949948799e-06, + "loss": 0.4837, + "step": 10393 + }, + { + "epoch": 0.428901543286292, + "grad_norm": 3.564030404007652, + "learning_rate": 1.9129892258541594e-06, + "loss": 0.5401, + "step": 10394 + }, + { + "epoch": 0.4289428076256499, + "grad_norm": 5.94149892663687, + "learning_rate": 1.912796494382499e-06, + "loss": 0.5924, + "step": 10395 + }, + { + "epoch": 0.4289840719650078, + "grad_norm": 4.131149870155571, + "learning_rate": 1.9126037555372606e-06, + "loss": 0.5903, + "step": 10396 + }, + { + "epoch": 0.4290253363043658, + "grad_norm": 5.523423683149776, + "learning_rate": 1.912411009321886e-06, + "loss": 0.4945, + "step": 10397 + }, + { + "epoch": 0.4290666006437237, + "grad_norm": 5.889017910411664, + "learning_rate": 1.91221825573982e-06, + "loss": 0.5082, + "step": 10398 + }, + { + "epoch": 0.4291078649830816, + "grad_norm": 3.7724688529614396, + "learning_rate": 1.9120254947945046e-06, + "loss": 0.5877, + "step": 10399 + }, + { + "epoch": 0.42914912932243954, + "grad_norm": 2.6952335309497637, + "learning_rate": 1.9118327264893834e-06, + "loss": 0.5277, 
+ "step": 10400 + }, + { + "epoch": 0.42919039366179745, + "grad_norm": 3.4458085687110325, + "learning_rate": 1.9116399508278997e-06, + "loss": 0.5213, + "step": 10401 + }, + { + "epoch": 0.4292316580011554, + "grad_norm": 3.561191341343425, + "learning_rate": 1.9114471678134957e-06, + "loss": 0.5456, + "step": 10402 + }, + { + "epoch": 0.42927292234051334, + "grad_norm": 3.7827433438145115, + "learning_rate": 1.9112543774496163e-06, + "loss": 0.5229, + "step": 10403 + }, + { + "epoch": 0.42931418667987126, + "grad_norm": 4.546599965728423, + "learning_rate": 1.9110615797397057e-06, + "loss": 0.5752, + "step": 10404 + }, + { + "epoch": 0.4293554510192292, + "grad_norm": 3.4548482697907144, + "learning_rate": 1.910868774687206e-06, + "loss": 0.4893, + "step": 10405 + }, + { + "epoch": 0.4293967153585871, + "grad_norm": 13.971957792269865, + "learning_rate": 1.9106759622955624e-06, + "loss": 0.5292, + "step": 10406 + }, + { + "epoch": 0.42943797969794506, + "grad_norm": 5.510200261817262, + "learning_rate": 1.910483142568219e-06, + "loss": 0.5272, + "step": 10407 + }, + { + "epoch": 0.429479244037303, + "grad_norm": 3.3967244978778903, + "learning_rate": 1.91029031550862e-06, + "loss": 0.5231, + "step": 10408 + }, + { + "epoch": 0.4295205083766609, + "grad_norm": 2.6772476672572707, + "learning_rate": 1.9100974811202094e-06, + "loss": 0.5402, + "step": 10409 + }, + { + "epoch": 0.4295617727160188, + "grad_norm": 4.010754915293304, + "learning_rate": 1.9099046394064315e-06, + "loss": 0.5494, + "step": 10410 + }, + { + "epoch": 0.4296030370553767, + "grad_norm": 4.220919706460052, + "learning_rate": 1.909711790370732e-06, + "loss": 0.5637, + "step": 10411 + }, + { + "epoch": 0.4296443013947347, + "grad_norm": 3.754062527932861, + "learning_rate": 1.9095189340165544e-06, + "loss": 0.5472, + "step": 10412 + }, + { + "epoch": 0.4296855657340926, + "grad_norm": 2.2144244170864527, + "learning_rate": 1.9093260703473436e-06, + "loss": 0.51, + "step": 10413 + }, + { + "epoch": 0.4297268300734505, + "grad_norm": 2.784311318244003, + "learning_rate": 1.9091331993665466e-06, + "loss": 0.56, + "step": 10414 + }, + { + "epoch": 0.42976809441280844, + "grad_norm": 3.428402117810713, + "learning_rate": 1.9089403210776065e-06, + "loss": 0.5492, + "step": 10415 + }, + { + "epoch": 0.42980935875216636, + "grad_norm": 5.819891688041701, + "learning_rate": 1.908747435483969e-06, + "loss": 0.5458, + "step": 10416 + }, + { + "epoch": 0.42985062309152433, + "grad_norm": 4.22326215491952, + "learning_rate": 1.9085545425890804e-06, + "loss": 0.4859, + "step": 10417 + }, + { + "epoch": 0.42989188743088225, + "grad_norm": 3.3323206085590322, + "learning_rate": 1.908361642396385e-06, + "loss": 0.4887, + "step": 10418 + }, + { + "epoch": 0.42993315177024016, + "grad_norm": 5.387933378243077, + "learning_rate": 1.9081687349093293e-06, + "loss": 0.5406, + "step": 10419 + }, + { + "epoch": 0.4299744161095981, + "grad_norm": 9.624805175216226, + "learning_rate": 1.907975820131359e-06, + "loss": 0.5642, + "step": 10420 + }, + { + "epoch": 0.430015680448956, + "grad_norm": 6.196981552581285, + "learning_rate": 1.90778289806592e-06, + "loss": 0.5329, + "step": 10421 + }, + { + "epoch": 0.43005694478831397, + "grad_norm": 3.2998156175821642, + "learning_rate": 1.9075899687164584e-06, + "loss": 0.5439, + "step": 10422 + }, + { + "epoch": 0.4300982091276719, + "grad_norm": 1.7661405505270371, + "learning_rate": 1.90739703208642e-06, + "loss": 0.519, + "step": 10423 + }, + { + "epoch": 0.4301394734670298, + "grad_norm": 
3.969624055812896, + "learning_rate": 1.9072040881792516e-06, + "loss": 0.617, + "step": 10424 + }, + { + "epoch": 0.4301807378063877, + "grad_norm": 2.780354621604151, + "learning_rate": 1.9070111369983997e-06, + "loss": 0.5328, + "step": 10425 + }, + { + "epoch": 0.43022200214574563, + "grad_norm": 12.832913393258357, + "learning_rate": 1.9068181785473103e-06, + "loss": 0.5001, + "step": 10426 + }, + { + "epoch": 0.43026326648510355, + "grad_norm": 2.568182231169817, + "learning_rate": 1.906625212829431e-06, + "loss": 0.4988, + "step": 10427 + }, + { + "epoch": 0.4303045308244615, + "grad_norm": 7.875131644933475, + "learning_rate": 1.906432239848208e-06, + "loss": 0.5928, + "step": 10428 + }, + { + "epoch": 0.43034579516381943, + "grad_norm": 2.599369286999496, + "learning_rate": 1.9062392596070885e-06, + "loss": 0.5569, + "step": 10429 + }, + { + "epoch": 0.43038705950317735, + "grad_norm": 3.852110900650468, + "learning_rate": 1.9060462721095192e-06, + "loss": 0.5295, + "step": 10430 + }, + { + "epoch": 0.43042832384253527, + "grad_norm": 12.754174281622216, + "learning_rate": 1.9058532773589483e-06, + "loss": 0.5625, + "step": 10431 + }, + { + "epoch": 0.4304695881818932, + "grad_norm": 3.631624043757976, + "learning_rate": 1.9056602753588218e-06, + "loss": 0.5127, + "step": 10432 + }, + { + "epoch": 0.43051085252125115, + "grad_norm": 5.2503809070766145, + "learning_rate": 1.9054672661125887e-06, + "loss": 0.4639, + "step": 10433 + }, + { + "epoch": 0.43055211686060907, + "grad_norm": 3.0440301550570754, + "learning_rate": 1.9052742496236958e-06, + "loss": 0.5172, + "step": 10434 + }, + { + "epoch": 0.430593381199967, + "grad_norm": 3.590172148530719, + "learning_rate": 1.9050812258955905e-06, + "loss": 0.5246, + "step": 10435 + }, + { + "epoch": 0.4306346455393249, + "grad_norm": 2.295140296270766, + "learning_rate": 1.9048881949317218e-06, + "loss": 0.4719, + "step": 10436 + }, + { + "epoch": 0.4306759098786828, + "grad_norm": 29.20391117373665, + "learning_rate": 1.9046951567355363e-06, + "loss": 0.5637, + "step": 10437 + }, + { + "epoch": 0.4307171742180408, + "grad_norm": 2.8147273064361427, + "learning_rate": 1.9045021113104836e-06, + "loss": 0.507, + "step": 10438 + }, + { + "epoch": 0.4307584385573987, + "grad_norm": 3.2417090472412213, + "learning_rate": 1.904309058660011e-06, + "loss": 0.5231, + "step": 10439 + }, + { + "epoch": 0.4307997028967566, + "grad_norm": 7.843021342374844, + "learning_rate": 1.904115998787567e-06, + "loss": 0.5429, + "step": 10440 + }, + { + "epoch": 0.43084096723611454, + "grad_norm": 3.488253495809614, + "learning_rate": 1.9039229316966005e-06, + "loss": 0.5684, + "step": 10441 + }, + { + "epoch": 0.43088223157547245, + "grad_norm": 3.127148907410381, + "learning_rate": 1.9037298573905602e-06, + "loss": 0.5608, + "step": 10442 + }, + { + "epoch": 0.4309234959148304, + "grad_norm": 7.17935493491302, + "learning_rate": 1.9035367758728945e-06, + "loss": 0.4795, + "step": 10443 + }, + { + "epoch": 0.43096476025418834, + "grad_norm": 2.5680406883065343, + "learning_rate": 1.9033436871470526e-06, + "loss": 0.5439, + "step": 10444 + }, + { + "epoch": 0.43100602459354626, + "grad_norm": 4.645902397240397, + "learning_rate": 1.903150591216483e-06, + "loss": 0.5291, + "step": 10445 + }, + { + "epoch": 0.43104728893290417, + "grad_norm": 3.50838577087702, + "learning_rate": 1.9029574880846357e-06, + "loss": 0.5449, + "step": 10446 + }, + { + "epoch": 0.4310885532722621, + "grad_norm": 2.874751938721191, + "learning_rate": 1.9027643777549597e-06, + "loss": 
0.5091, + "step": 10447 + }, + { + "epoch": 0.43112981761162006, + "grad_norm": 3.517054594980699, + "learning_rate": 1.902571260230904e-06, + "loss": 0.6048, + "step": 10448 + }, + { + "epoch": 0.431171081950978, + "grad_norm": 3.6449605409732135, + "learning_rate": 1.9023781355159184e-06, + "loss": 0.5873, + "step": 10449 + }, + { + "epoch": 0.4312123462903359, + "grad_norm": 52.32799816031075, + "learning_rate": 1.9021850036134531e-06, + "loss": 0.4974, + "step": 10450 + }, + { + "epoch": 0.4312536106296938, + "grad_norm": 2.9333758654933075, + "learning_rate": 1.9019918645269575e-06, + "loss": 0.5439, + "step": 10451 + }, + { + "epoch": 0.4312948749690517, + "grad_norm": 13.644955698976398, + "learning_rate": 1.9017987182598817e-06, + "loss": 0.52, + "step": 10452 + }, + { + "epoch": 0.4313361393084097, + "grad_norm": 3.1064642922614665, + "learning_rate": 1.9016055648156751e-06, + "loss": 0.5355, + "step": 10453 + }, + { + "epoch": 0.4313774036477676, + "grad_norm": 10.53842364428914, + "learning_rate": 1.901412404197789e-06, + "loss": 0.5372, + "step": 10454 + }, + { + "epoch": 0.4314186679871255, + "grad_norm": 5.020486137880235, + "learning_rate": 1.901219236409673e-06, + "loss": 0.4908, + "step": 10455 + }, + { + "epoch": 0.43145993232648344, + "grad_norm": 2.0512528316680037, + "learning_rate": 1.9010260614547778e-06, + "loss": 0.5054, + "step": 10456 + }, + { + "epoch": 0.43150119666584136, + "grad_norm": 3.8787284808624896, + "learning_rate": 1.9008328793365535e-06, + "loss": 0.4869, + "step": 10457 + }, + { + "epoch": 0.43154246100519933, + "grad_norm": 3.912308714734943, + "learning_rate": 1.9006396900584517e-06, + "loss": 0.5416, + "step": 10458 + }, + { + "epoch": 0.43158372534455725, + "grad_norm": 2.865505918359248, + "learning_rate": 1.9004464936239224e-06, + "loss": 0.5523, + "step": 10459 + }, + { + "epoch": 0.43162498968391516, + "grad_norm": 7.726619489070402, + "learning_rate": 1.900253290036418e-06, + "loss": 0.5693, + "step": 10460 + }, + { + "epoch": 0.4316662540232731, + "grad_norm": 9.342070108865023, + "learning_rate": 1.9000600792993873e-06, + "loss": 0.5469, + "step": 10461 + }, + { + "epoch": 0.431707518362631, + "grad_norm": 5.659714016354522, + "learning_rate": 1.8998668614162834e-06, + "loss": 0.5738, + "step": 10462 + }, + { + "epoch": 0.43174878270198896, + "grad_norm": 11.303245982261139, + "learning_rate": 1.8996736363905568e-06, + "loss": 0.5515, + "step": 10463 + }, + { + "epoch": 0.4317900470413469, + "grad_norm": 4.776190197117736, + "learning_rate": 1.8994804042256596e-06, + "loss": 0.5146, + "step": 10464 + }, + { + "epoch": 0.4318313113807048, + "grad_norm": 2.3856596692272625, + "learning_rate": 1.8992871649250426e-06, + "loss": 0.5715, + "step": 10465 + }, + { + "epoch": 0.4318725757200627, + "grad_norm": 9.977564134734658, + "learning_rate": 1.899093918492158e-06, + "loss": 0.5152, + "step": 10466 + }, + { + "epoch": 0.43191384005942063, + "grad_norm": 2.244455032242996, + "learning_rate": 1.8989006649304579e-06, + "loss": 0.5422, + "step": 10467 + }, + { + "epoch": 0.4319551043987786, + "grad_norm": 13.250772372630662, + "learning_rate": 1.8987074042433937e-06, + "loss": 0.5107, + "step": 10468 + }, + { + "epoch": 0.4319963687381365, + "grad_norm": 12.828244738956545, + "learning_rate": 1.8985141364344182e-06, + "loss": 0.5222, + "step": 10469 + }, + { + "epoch": 0.43203763307749443, + "grad_norm": 7.561635733859634, + "learning_rate": 1.8983208615069834e-06, + "loss": 0.5026, + "step": 10470 + }, + { + "epoch": 0.43207889741685235, + 
"grad_norm": 5.649833300234184, + "learning_rate": 1.898127579464541e-06, + "loss": 0.5031, + "step": 10471 + }, + { + "epoch": 0.43212016175621026, + "grad_norm": 3.396407998849075, + "learning_rate": 1.8979342903105445e-06, + "loss": 0.4769, + "step": 10472 + }, + { + "epoch": 0.43216142609556824, + "grad_norm": 3.350012125617923, + "learning_rate": 1.897740994048446e-06, + "loss": 0.5491, + "step": 10473 + }, + { + "epoch": 0.43220269043492615, + "grad_norm": 2.4776689661169327, + "learning_rate": 1.897547690681698e-06, + "loss": 0.5342, + "step": 10474 + }, + { + "epoch": 0.43224395477428407, + "grad_norm": 2.365366786866936, + "learning_rate": 1.8973543802137542e-06, + "loss": 0.516, + "step": 10475 + }, + { + "epoch": 0.432285219113642, + "grad_norm": 2.501618748869993, + "learning_rate": 1.8971610626480668e-06, + "loss": 0.5129, + "step": 10476 + }, + { + "epoch": 0.4323264834529999, + "grad_norm": 8.903653921014843, + "learning_rate": 1.8969677379880897e-06, + "loss": 0.4969, + "step": 10477 + }, + { + "epoch": 0.43236774779235787, + "grad_norm": 2.464535462642699, + "learning_rate": 1.8967744062372754e-06, + "loss": 0.5036, + "step": 10478 + }, + { + "epoch": 0.4324090121317158, + "grad_norm": 1.6990417891910257, + "learning_rate": 1.8965810673990777e-06, + "loss": 0.5207, + "step": 10479 + }, + { + "epoch": 0.4324502764710737, + "grad_norm": 6.12313157576521, + "learning_rate": 1.89638772147695e-06, + "loss": 0.4987, + "step": 10480 + }, + { + "epoch": 0.4324915408104316, + "grad_norm": 7.8448325374315955, + "learning_rate": 1.8961943684743466e-06, + "loss": 0.5484, + "step": 10481 + }, + { + "epoch": 0.43253280514978953, + "grad_norm": 17.05580617604151, + "learning_rate": 1.8960010083947199e-06, + "loss": 0.5335, + "step": 10482 + }, + { + "epoch": 0.4325740694891475, + "grad_norm": 3.385156607830155, + "learning_rate": 1.8958076412415247e-06, + "loss": 0.5232, + "step": 10483 + }, + { + "epoch": 0.4326153338285054, + "grad_norm": 3.0544794331208966, + "learning_rate": 1.8956142670182147e-06, + "loss": 0.5477, + "step": 10484 + }, + { + "epoch": 0.43265659816786334, + "grad_norm": 5.308875360879972, + "learning_rate": 1.8954208857282448e-06, + "loss": 0.5296, + "step": 10485 + }, + { + "epoch": 0.43269786250722125, + "grad_norm": 2.6738750743233535, + "learning_rate": 1.895227497375068e-06, + "loss": 0.5448, + "step": 10486 + }, + { + "epoch": 0.43273912684657917, + "grad_norm": 2.0344896984081333, + "learning_rate": 1.89503410196214e-06, + "loss": 0.4939, + "step": 10487 + }, + { + "epoch": 0.4327803911859371, + "grad_norm": 5.323748255255616, + "learning_rate": 1.8948406994929143e-06, + "loss": 0.5182, + "step": 10488 + }, + { + "epoch": 0.43282165552529506, + "grad_norm": 7.057130490372754, + "learning_rate": 1.8946472899708465e-06, + "loss": 0.5739, + "step": 10489 + }, + { + "epoch": 0.432862919864653, + "grad_norm": 2.2827198301006892, + "learning_rate": 1.8944538733993901e-06, + "loss": 0.5122, + "step": 10490 + }, + { + "epoch": 0.4329041842040109, + "grad_norm": 4.784410517041123, + "learning_rate": 1.8942604497820014e-06, + "loss": 0.4987, + "step": 10491 + }, + { + "epoch": 0.4329454485433688, + "grad_norm": 4.447513034381811, + "learning_rate": 1.8940670191221338e-06, + "loss": 0.632, + "step": 10492 + }, + { + "epoch": 0.4329867128827267, + "grad_norm": 2.9710558018904694, + "learning_rate": 1.8938735814232448e-06, + "loss": 0.5086, + "step": 10493 + }, + { + "epoch": 0.4330279772220847, + "grad_norm": 3.7251233248329734, + "learning_rate": 1.8936801366887874e-06, 
+ "loss": 0.5223, + "step": 10494 + }, + { + "epoch": 0.4330692415614426, + "grad_norm": 3.6177792286048893, + "learning_rate": 1.8934866849222183e-06, + "loss": 0.4914, + "step": 10495 + }, + { + "epoch": 0.4331105059008005, + "grad_norm": 5.764087923455407, + "learning_rate": 1.8932932261269922e-06, + "loss": 0.5611, + "step": 10496 + }, + { + "epoch": 0.43315177024015844, + "grad_norm": 11.381971386682462, + "learning_rate": 1.8930997603065657e-06, + "loss": 0.5383, + "step": 10497 + }, + { + "epoch": 0.43319303457951636, + "grad_norm": 5.737396554389031, + "learning_rate": 1.8929062874643938e-06, + "loss": 0.5539, + "step": 10498 + }, + { + "epoch": 0.4332342989188743, + "grad_norm": 26.40078544611478, + "learning_rate": 1.8927128076039329e-06, + "loss": 0.5509, + "step": 10499 + }, + { + "epoch": 0.43327556325823224, + "grad_norm": 7.199297305980923, + "learning_rate": 1.8925193207286388e-06, + "loss": 0.5878, + "step": 10500 + }, + { + "epoch": 0.43331682759759016, + "grad_norm": 14.03324141994493, + "learning_rate": 1.8923258268419679e-06, + "loss": 0.4907, + "step": 10501 + }, + { + "epoch": 0.4333580919369481, + "grad_norm": 3.3010784945245653, + "learning_rate": 1.8921323259473758e-06, + "loss": 0.5026, + "step": 10502 + }, + { + "epoch": 0.433399356276306, + "grad_norm": 4.531789216988792, + "learning_rate": 1.8919388180483197e-06, + "loss": 0.5606, + "step": 10503 + }, + { + "epoch": 0.43344062061566396, + "grad_norm": 4.53995742000973, + "learning_rate": 1.8917453031482557e-06, + "loss": 0.5024, + "step": 10504 + }, + { + "epoch": 0.4334818849550219, + "grad_norm": 6.208299725152568, + "learning_rate": 1.8915517812506406e-06, + "loss": 0.5039, + "step": 10505 + }, + { + "epoch": 0.4335231492943798, + "grad_norm": 3.4361241307640937, + "learning_rate": 1.8913582523589315e-06, + "loss": 0.542, + "step": 10506 + }, + { + "epoch": 0.4335644136337377, + "grad_norm": 2.856310427877432, + "learning_rate": 1.8911647164765847e-06, + "loss": 0.4877, + "step": 10507 + }, + { + "epoch": 0.4336056779730956, + "grad_norm": 5.02536212261245, + "learning_rate": 1.8909711736070572e-06, + "loss": 0.4941, + "step": 10508 + }, + { + "epoch": 0.4336469423124536, + "grad_norm": 3.5387898401457063, + "learning_rate": 1.890777623753807e-06, + "loss": 0.5581, + "step": 10509 + }, + { + "epoch": 0.4336882066518115, + "grad_norm": 3.432442577144609, + "learning_rate": 1.8905840669202905e-06, + "loss": 0.506, + "step": 10510 + }, + { + "epoch": 0.43372947099116943, + "grad_norm": 2.9936940122973072, + "learning_rate": 1.890390503109966e-06, + "loss": 0.5711, + "step": 10511 + }, + { + "epoch": 0.43377073533052735, + "grad_norm": 3.3339008838655473, + "learning_rate": 1.8901969323262896e-06, + "loss": 0.5444, + "step": 10512 + }, + { + "epoch": 0.43381199966988526, + "grad_norm": 3.2157665157936934, + "learning_rate": 1.8900033545727207e-06, + "loss": 0.5207, + "step": 10513 + }, + { + "epoch": 0.43385326400924323, + "grad_norm": 3.2714884772319452, + "learning_rate": 1.889809769852716e-06, + "loss": 0.5668, + "step": 10514 + }, + { + "epoch": 0.43389452834860115, + "grad_norm": 3.3718421058176062, + "learning_rate": 1.8896161781697337e-06, + "loss": 0.5865, + "step": 10515 + }, + { + "epoch": 0.43393579268795907, + "grad_norm": 2.1885361355575923, + "learning_rate": 1.8894225795272318e-06, + "loss": 0.5112, + "step": 10516 + }, + { + "epoch": 0.433977057027317, + "grad_norm": 8.804029675902123, + "learning_rate": 1.889228973928668e-06, + "loss": 0.5638, + "step": 10517 + }, + { + "epoch": 
0.4340183213666749, + "grad_norm": 4.45415916657379, + "learning_rate": 1.8890353613775013e-06, + "loss": 0.5324, + "step": 10518 + }, + { + "epoch": 0.43405958570603287, + "grad_norm": 3.934114806137266, + "learning_rate": 1.8888417418771902e-06, + "loss": 0.5535, + "step": 10519 + }, + { + "epoch": 0.4341008500453908, + "grad_norm": 2.0937996953461604, + "learning_rate": 1.8886481154311925e-06, + "loss": 0.523, + "step": 10520 + }, + { + "epoch": 0.4341421143847487, + "grad_norm": 10.861643029771713, + "learning_rate": 1.8884544820429673e-06, + "loss": 0.5632, + "step": 10521 + }, + { + "epoch": 0.4341833787241066, + "grad_norm": 2.805990990430407, + "learning_rate": 1.8882608417159732e-06, + "loss": 0.4554, + "step": 10522 + }, + { + "epoch": 0.43422464306346453, + "grad_norm": 3.9986704523465995, + "learning_rate": 1.8880671944536691e-06, + "loss": 0.4765, + "step": 10523 + }, + { + "epoch": 0.4342659074028225, + "grad_norm": 4.997793049134995, + "learning_rate": 1.8878735402595143e-06, + "loss": 0.5821, + "step": 10524 + }, + { + "epoch": 0.4343071717421804, + "grad_norm": 2.669215364128738, + "learning_rate": 1.8876798791369677e-06, + "loss": 0.5109, + "step": 10525 + }, + { + "epoch": 0.43434843608153834, + "grad_norm": 2.8683100086241633, + "learning_rate": 1.887486211089489e-06, + "loss": 0.514, + "step": 10526 + }, + { + "epoch": 0.43438970042089625, + "grad_norm": 4.2545635506870925, + "learning_rate": 1.887292536120537e-06, + "loss": 0.4737, + "step": 10527 + }, + { + "epoch": 0.43443096476025417, + "grad_norm": 4.032552993935455, + "learning_rate": 1.887098854233571e-06, + "loss": 0.5028, + "step": 10528 + }, + { + "epoch": 0.43447222909961214, + "grad_norm": 5.2340245862795935, + "learning_rate": 1.8869051654320515e-06, + "loss": 0.5516, + "step": 10529 + }, + { + "epoch": 0.43451349343897006, + "grad_norm": 4.425108605481729, + "learning_rate": 1.8867114697194377e-06, + "loss": 0.4898, + "step": 10530 + }, + { + "epoch": 0.43455475777832797, + "grad_norm": 6.632321192690955, + "learning_rate": 1.8865177670991897e-06, + "loss": 0.5581, + "step": 10531 + }, + { + "epoch": 0.4345960221176859, + "grad_norm": 4.441392736301276, + "learning_rate": 1.8863240575747675e-06, + "loss": 0.5898, + "step": 10532 + }, + { + "epoch": 0.4346372864570438, + "grad_norm": 2.5782456788398163, + "learning_rate": 1.8861303411496312e-06, + "loss": 0.543, + "step": 10533 + }, + { + "epoch": 0.4346785507964018, + "grad_norm": 73.16161589896409, + "learning_rate": 1.8859366178272411e-06, + "loss": 0.5401, + "step": 10534 + }, + { + "epoch": 0.4347198151357597, + "grad_norm": 2.7343716003126506, + "learning_rate": 1.8857428876110574e-06, + "loss": 0.5564, + "step": 10535 + }, + { + "epoch": 0.4347610794751176, + "grad_norm": 11.788728280219912, + "learning_rate": 1.8855491505045409e-06, + "loss": 0.5194, + "step": 10536 + }, + { + "epoch": 0.4348023438144755, + "grad_norm": 3.931770028187181, + "learning_rate": 1.885355406511152e-06, + "loss": 0.5179, + "step": 10537 + }, + { + "epoch": 0.43484360815383344, + "grad_norm": 4.34898204646515, + "learning_rate": 1.8851616556343515e-06, + "loss": 0.5449, + "step": 10538 + }, + { + "epoch": 0.4348848724931914, + "grad_norm": 2.5175157210695915, + "learning_rate": 1.8849678978776005e-06, + "loss": 0.5358, + "step": 10539 + }, + { + "epoch": 0.4349261368325493, + "grad_norm": 4.799802774060145, + "learning_rate": 1.8847741332443595e-06, + "loss": 0.5027, + "step": 10540 + }, + { + "epoch": 0.43496740117190724, + "grad_norm": 5.460890751516273, + 
"learning_rate": 1.88458036173809e-06, + "loss": 0.5793, + "step": 10541 + }, + { + "epoch": 0.43500866551126516, + "grad_norm": 1.8752879309506651, + "learning_rate": 1.8843865833622532e-06, + "loss": 0.5706, + "step": 10542 + }, + { + "epoch": 0.4350499298506231, + "grad_norm": 3.1064649958842843, + "learning_rate": 1.8841927981203105e-06, + "loss": 0.4894, + "step": 10543 + }, + { + "epoch": 0.43509119418998105, + "grad_norm": 2.6757850559298064, + "learning_rate": 1.8839990060157231e-06, + "loss": 0.539, + "step": 10544 + }, + { + "epoch": 0.43513245852933896, + "grad_norm": 4.168415537108513, + "learning_rate": 1.883805207051953e-06, + "loss": 0.5062, + "step": 10545 + }, + { + "epoch": 0.4351737228686969, + "grad_norm": 2.6976266842164462, + "learning_rate": 1.8836114012324623e-06, + "loss": 0.5468, + "step": 10546 + }, + { + "epoch": 0.4352149872080548, + "grad_norm": 6.332955724877369, + "learning_rate": 1.8834175885607116e-06, + "loss": 0.5542, + "step": 10547 + }, + { + "epoch": 0.4352562515474127, + "grad_norm": 3.7755811372622383, + "learning_rate": 1.883223769040164e-06, + "loss": 0.5366, + "step": 10548 + }, + { + "epoch": 0.4352975158867706, + "grad_norm": 3.6396786564661925, + "learning_rate": 1.8830299426742806e-06, + "loss": 0.5404, + "step": 10549 + }, + { + "epoch": 0.4353387802261286, + "grad_norm": 6.403318966608506, + "learning_rate": 1.8828361094665248e-06, + "loss": 0.5122, + "step": 10550 + }, + { + "epoch": 0.4353800445654865, + "grad_norm": 4.1889908450025874, + "learning_rate": 1.8826422694203582e-06, + "loss": 0.5431, + "step": 10551 + }, + { + "epoch": 0.43542130890484443, + "grad_norm": 18.436144267747856, + "learning_rate": 1.8824484225392435e-06, + "loss": 0.5485, + "step": 10552 + }, + { + "epoch": 0.43546257324420234, + "grad_norm": 7.339606820820188, + "learning_rate": 1.8822545688266433e-06, + "loss": 0.5792, + "step": 10553 + }, + { + "epoch": 0.43550383758356026, + "grad_norm": 2.2365314820998345, + "learning_rate": 1.8820607082860199e-06, + "loss": 0.5093, + "step": 10554 + }, + { + "epoch": 0.43554510192291823, + "grad_norm": 3.484084054839052, + "learning_rate": 1.8818668409208368e-06, + "loss": 0.5415, + "step": 10555 + }, + { + "epoch": 0.43558636626227615, + "grad_norm": 8.423152565118121, + "learning_rate": 1.8816729667345564e-06, + "loss": 0.5736, + "step": 10556 + }, + { + "epoch": 0.43562763060163406, + "grad_norm": 4.002371626867554, + "learning_rate": 1.8814790857306423e-06, + "loss": 0.5232, + "step": 10557 + }, + { + "epoch": 0.435668894940992, + "grad_norm": 3.07109700650755, + "learning_rate": 1.8812851979125572e-06, + "loss": 0.4731, + "step": 10558 + }, + { + "epoch": 0.4357101592803499, + "grad_norm": 4.147921922362069, + "learning_rate": 1.881091303283765e-06, + "loss": 0.5364, + "step": 10559 + }, + { + "epoch": 0.43575142361970787, + "grad_norm": 9.316694980275447, + "learning_rate": 1.880897401847728e-06, + "loss": 0.5277, + "step": 10560 + }, + { + "epoch": 0.4357926879590658, + "grad_norm": 3.776097419023877, + "learning_rate": 1.8807034936079114e-06, + "loss": 0.5354, + "step": 10561 + }, + { + "epoch": 0.4358339522984237, + "grad_norm": 7.652459092992647, + "learning_rate": 1.8805095785677775e-06, + "loss": 0.5691, + "step": 10562 + }, + { + "epoch": 0.4358752166377816, + "grad_norm": 3.4089500485193516, + "learning_rate": 1.880315656730791e-06, + "loss": 0.4943, + "step": 10563 + }, + { + "epoch": 0.43591648097713953, + "grad_norm": 16.23983184187221, + "learning_rate": 1.880121728100415e-06, + "loss": 0.6041, + "step": 
10564 + }, + { + "epoch": 0.4359577453164975, + "grad_norm": 19.280038505318274, + "learning_rate": 1.8799277926801147e-06, + "loss": 0.5653, + "step": 10565 + }, + { + "epoch": 0.4359990096558554, + "grad_norm": 2.4349632596125392, + "learning_rate": 1.8797338504733533e-06, + "loss": 0.5239, + "step": 10566 + }, + { + "epoch": 0.43604027399521333, + "grad_norm": 9.545415372929972, + "learning_rate": 1.8795399014835952e-06, + "loss": 0.4935, + "step": 10567 + }, + { + "epoch": 0.43608153833457125, + "grad_norm": 2.3387488823104885, + "learning_rate": 1.8793459457143046e-06, + "loss": 0.5806, + "step": 10568 + }, + { + "epoch": 0.43612280267392917, + "grad_norm": 4.88725446505866, + "learning_rate": 1.8791519831689472e-06, + "loss": 0.5426, + "step": 10569 + }, + { + "epoch": 0.43616406701328714, + "grad_norm": 2.610193256633933, + "learning_rate": 1.8789580138509865e-06, + "loss": 0.4872, + "step": 10570 + }, + { + "epoch": 0.43620533135264505, + "grad_norm": 10.860907221105645, + "learning_rate": 1.878764037763888e-06, + "loss": 0.4445, + "step": 10571 + }, + { + "epoch": 0.43624659569200297, + "grad_norm": 2.8845877110980687, + "learning_rate": 1.8785700549111158e-06, + "loss": 0.5507, + "step": 10572 + }, + { + "epoch": 0.4362878600313609, + "grad_norm": 4.258916733250806, + "learning_rate": 1.8783760652961358e-06, + "loss": 0.5305, + "step": 10573 + }, + { + "epoch": 0.4363291243707188, + "grad_norm": 2.5626170997186852, + "learning_rate": 1.8781820689224124e-06, + "loss": 0.6123, + "step": 10574 + }, + { + "epoch": 0.4363703887100768, + "grad_norm": 2.7342791560280464, + "learning_rate": 1.8779880657934114e-06, + "loss": 0.5696, + "step": 10575 + }, + { + "epoch": 0.4364116530494347, + "grad_norm": 3.723457191724088, + "learning_rate": 1.8777940559125975e-06, + "loss": 0.4845, + "step": 10576 + }, + { + "epoch": 0.4364529173887926, + "grad_norm": 2.778386451844508, + "learning_rate": 1.8776000392834372e-06, + "loss": 0.4849, + "step": 10577 + }, + { + "epoch": 0.4364941817281505, + "grad_norm": 3.5532482741903593, + "learning_rate": 1.877406015909395e-06, + "loss": 0.5285, + "step": 10578 + }, + { + "epoch": 0.43653544606750844, + "grad_norm": 5.3577810597398905, + "learning_rate": 1.8772119857939386e-06, + "loss": 0.5527, + "step": 10579 + }, + { + "epoch": 0.4365767104068664, + "grad_norm": 12.103545307631316, + "learning_rate": 1.877017948940531e-06, + "loss": 0.5774, + "step": 10580 + }, + { + "epoch": 0.4366179747462243, + "grad_norm": 4.742176935557552, + "learning_rate": 1.8768239053526405e-06, + "loss": 0.5789, + "step": 10581 + }, + { + "epoch": 0.43665923908558224, + "grad_norm": 2.793238395363805, + "learning_rate": 1.8766298550337318e-06, + "loss": 0.4845, + "step": 10582 + }, + { + "epoch": 0.43670050342494016, + "grad_norm": 2.7047860265678736, + "learning_rate": 1.876435797987272e-06, + "loss": 0.5167, + "step": 10583 + }, + { + "epoch": 0.43674176776429807, + "grad_norm": 5.304744432827652, + "learning_rate": 1.876241734216727e-06, + "loss": 0.496, + "step": 10584 + }, + { + "epoch": 0.43678303210365604, + "grad_norm": 4.186335939324664, + "learning_rate": 1.876047663725564e-06, + "loss": 0.5636, + "step": 10585 + }, + { + "epoch": 0.43682429644301396, + "grad_norm": 5.403220317562237, + "learning_rate": 1.8758535865172484e-06, + "loss": 0.484, + "step": 10586 + }, + { + "epoch": 0.4368655607823719, + "grad_norm": 2.8458033951852606, + "learning_rate": 1.8756595025952477e-06, + "loss": 0.5661, + "step": 10587 + }, + { + "epoch": 0.4369068251217298, + "grad_norm": 
17.681036937923103, + "learning_rate": 1.8754654119630285e-06, + "loss": 0.5212, + "step": 10588 + }, + { + "epoch": 0.4369480894610877, + "grad_norm": 3.0292632974303175, + "learning_rate": 1.8752713146240579e-06, + "loss": 0.5641, + "step": 10589 + }, + { + "epoch": 0.4369893538004457, + "grad_norm": 2.8455471162433392, + "learning_rate": 1.8750772105818025e-06, + "loss": 0.535, + "step": 10590 + }, + { + "epoch": 0.4370306181398036, + "grad_norm": 7.7507347016612, + "learning_rate": 1.87488309983973e-06, + "loss": 0.525, + "step": 10591 + }, + { + "epoch": 0.4370718824791615, + "grad_norm": 4.995143437521271, + "learning_rate": 1.8746889824013078e-06, + "loss": 0.5711, + "step": 10592 + }, + { + "epoch": 0.4371131468185194, + "grad_norm": 13.749637405279891, + "learning_rate": 1.8744948582700023e-06, + "loss": 0.5311, + "step": 10593 + }, + { + "epoch": 0.43715441115787734, + "grad_norm": 3.239130805594694, + "learning_rate": 1.8743007274492826e-06, + "loss": 0.5554, + "step": 10594 + }, + { + "epoch": 0.4371956754972353, + "grad_norm": 37.64191244576077, + "learning_rate": 1.8741065899426154e-06, + "loss": 0.5161, + "step": 10595 + }, + { + "epoch": 0.43723693983659323, + "grad_norm": 4.694680934969611, + "learning_rate": 1.8739124457534683e-06, + "loss": 0.4552, + "step": 10596 + }, + { + "epoch": 0.43727820417595115, + "grad_norm": 2.9022846635965065, + "learning_rate": 1.87371829488531e-06, + "loss": 0.5739, + "step": 10597 + }, + { + "epoch": 0.43731946851530906, + "grad_norm": 5.846314606581282, + "learning_rate": 1.8735241373416077e-06, + "loss": 0.5559, + "step": 10598 + }, + { + "epoch": 0.437360732854667, + "grad_norm": 4.199189536348993, + "learning_rate": 1.8733299731258296e-06, + "loss": 0.5019, + "step": 10599 + }, + { + "epoch": 0.43740199719402495, + "grad_norm": 4.488948206851878, + "learning_rate": 1.8731358022414448e-06, + "loss": 0.5332, + "step": 10600 + }, + { + "epoch": 0.43744326153338287, + "grad_norm": 3.0261572933425516, + "learning_rate": 1.8729416246919208e-06, + "loss": 0.5205, + "step": 10601 + }, + { + "epoch": 0.4374845258727408, + "grad_norm": 5.079568069536789, + "learning_rate": 1.8727474404807264e-06, + "loss": 0.5035, + "step": 10602 + }, + { + "epoch": 0.4375257902120987, + "grad_norm": 2.9008433991621847, + "learning_rate": 1.8725532496113304e-06, + "loss": 0.5329, + "step": 10603 + }, + { + "epoch": 0.4375670545514566, + "grad_norm": 4.717569635527405, + "learning_rate": 1.8723590520872016e-06, + "loss": 0.5799, + "step": 10604 + }, + { + "epoch": 0.4376083188908146, + "grad_norm": 3.372402918869439, + "learning_rate": 1.8721648479118084e-06, + "loss": 0.5196, + "step": 10605 + }, + { + "epoch": 0.4376495832301725, + "grad_norm": 2.8659402520392674, + "learning_rate": 1.8719706370886201e-06, + "loss": 0.5122, + "step": 10606 + }, + { + "epoch": 0.4376908475695304, + "grad_norm": 3.2503116184733885, + "learning_rate": 1.8717764196211053e-06, + "loss": 0.5464, + "step": 10607 + }, + { + "epoch": 0.43773211190888833, + "grad_norm": 3.2405359243938685, + "learning_rate": 1.871582195512734e-06, + "loss": 0.5723, + "step": 10608 + }, + { + "epoch": 0.43777337624824625, + "grad_norm": 6.361916891564709, + "learning_rate": 1.871387964766975e-06, + "loss": 0.5148, + "step": 10609 + }, + { + "epoch": 0.4378146405876042, + "grad_norm": 5.215570139898451, + "learning_rate": 1.871193727387298e-06, + "loss": 0.6059, + "step": 10610 + }, + { + "epoch": 0.43785590492696214, + "grad_norm": 2.992302409304897, + "learning_rate": 1.8709994833771722e-06, + "loss": 
0.4906, + "step": 10611 + }, + { + "epoch": 0.43789716926632005, + "grad_norm": 3.8564466884156015, + "learning_rate": 1.870805232740068e-06, + "loss": 0.5119, + "step": 10612 + }, + { + "epoch": 0.43793843360567797, + "grad_norm": 4.01520936776417, + "learning_rate": 1.8706109754794541e-06, + "loss": 0.5739, + "step": 10613 + }, + { + "epoch": 0.4379796979450359, + "grad_norm": 4.351004748483962, + "learning_rate": 1.8704167115988018e-06, + "loss": 0.4819, + "step": 10614 + }, + { + "epoch": 0.4380209622843938, + "grad_norm": 6.60634512827213, + "learning_rate": 1.8702224411015797e-06, + "loss": 0.5118, + "step": 10615 + }, + { + "epoch": 0.43806222662375177, + "grad_norm": 5.681412301206762, + "learning_rate": 1.8700281639912588e-06, + "loss": 0.5426, + "step": 10616 + }, + { + "epoch": 0.4381034909631097, + "grad_norm": 5.41243141354381, + "learning_rate": 1.869833880271309e-06, + "loss": 0.5489, + "step": 10617 + }, + { + "epoch": 0.4381447553024676, + "grad_norm": 4.376976940393738, + "learning_rate": 1.8696395899452017e-06, + "loss": 0.5839, + "step": 10618 + }, + { + "epoch": 0.4381860196418255, + "grad_norm": 5.005572051788952, + "learning_rate": 1.869445293016406e-06, + "loss": 0.5281, + "step": 10619 + }, + { + "epoch": 0.43822728398118344, + "grad_norm": 4.206241479889449, + "learning_rate": 1.8692509894883935e-06, + "loss": 0.5081, + "step": 10620 + }, + { + "epoch": 0.4382685483205414, + "grad_norm": 11.58074940410743, + "learning_rate": 1.8690566793646345e-06, + "loss": 0.59, + "step": 10621 + }, + { + "epoch": 0.4383098126598993, + "grad_norm": 2.945061382875263, + "learning_rate": 1.8688623626485999e-06, + "loss": 0.5486, + "step": 10622 + }, + { + "epoch": 0.43835107699925724, + "grad_norm": 5.093106954596554, + "learning_rate": 1.8686680393437605e-06, + "loss": 0.531, + "step": 10623 + }, + { + "epoch": 0.43839234133861515, + "grad_norm": 2.8890871765018735, + "learning_rate": 1.8684737094535878e-06, + "loss": 0.5577, + "step": 10624 + }, + { + "epoch": 0.43843360567797307, + "grad_norm": 10.59860659352766, + "learning_rate": 1.868279372981553e-06, + "loss": 0.5624, + "step": 10625 + }, + { + "epoch": 0.43847487001733104, + "grad_norm": 4.841109075716558, + "learning_rate": 1.8680850299311276e-06, + "loss": 0.4838, + "step": 10626 + }, + { + "epoch": 0.43851613435668896, + "grad_norm": 2.7691309769526327, + "learning_rate": 1.867890680305782e-06, + "loss": 0.5214, + "step": 10627 + }, + { + "epoch": 0.4385573986960469, + "grad_norm": 3.6277481056601597, + "learning_rate": 1.8676963241089894e-06, + "loss": 0.4921, + "step": 10628 + }, + { + "epoch": 0.4385986630354048, + "grad_norm": 4.697605200179844, + "learning_rate": 1.86750196134422e-06, + "loss": 0.5621, + "step": 10629 + }, + { + "epoch": 0.4386399273747627, + "grad_norm": 2.99721673485691, + "learning_rate": 1.8673075920149462e-06, + "loss": 0.61, + "step": 10630 + }, + { + "epoch": 0.4386811917141207, + "grad_norm": 8.720224725782941, + "learning_rate": 1.8671132161246406e-06, + "loss": 0.5364, + "step": 10631 + }, + { + "epoch": 0.4387224560534786, + "grad_norm": 4.145701852885221, + "learning_rate": 1.8669188336767737e-06, + "loss": 0.4863, + "step": 10632 + }, + { + "epoch": 0.4387637203928365, + "grad_norm": 5.668225548417473, + "learning_rate": 1.8667244446748193e-06, + "loss": 0.5889, + "step": 10633 + }, + { + "epoch": 0.4388049847321944, + "grad_norm": 3.5471660214997014, + "learning_rate": 1.8665300491222486e-06, + "loss": 0.4881, + "step": 10634 + }, + { + "epoch": 0.43884624907155234, + "grad_norm": 
2.9765247581616006, + "learning_rate": 1.866335647022534e-06, + "loss": 0.565, + "step": 10635 + }, + { + "epoch": 0.4388875134109103, + "grad_norm": 2.0266988787288103, + "learning_rate": 1.8661412383791488e-06, + "loss": 0.4865, + "step": 10636 + }, + { + "epoch": 0.43892877775026823, + "grad_norm": 5.492269039718005, + "learning_rate": 1.8659468231955646e-06, + "loss": 0.6163, + "step": 10637 + }, + { + "epoch": 0.43897004208962614, + "grad_norm": 113.1910153256722, + "learning_rate": 1.8657524014752554e-06, + "loss": 0.5195, + "step": 10638 + }, + { + "epoch": 0.43901130642898406, + "grad_norm": 8.68069903807964, + "learning_rate": 1.865557973221693e-06, + "loss": 0.5095, + "step": 10639 + }, + { + "epoch": 0.439052570768342, + "grad_norm": 3.2936267109212123, + "learning_rate": 1.8653635384383506e-06, + "loss": 0.5446, + "step": 10640 + }, + { + "epoch": 0.43909383510769995, + "grad_norm": 2.711827004540543, + "learning_rate": 1.8651690971287011e-06, + "loss": 0.4784, + "step": 10641 + }, + { + "epoch": 0.43913509944705786, + "grad_norm": 8.356844722708583, + "learning_rate": 1.8649746492962182e-06, + "loss": 0.5525, + "step": 10642 + }, + { + "epoch": 0.4391763637864158, + "grad_norm": 3.48730220352708, + "learning_rate": 1.8647801949443757e-06, + "loss": 0.5244, + "step": 10643 + }, + { + "epoch": 0.4392176281257737, + "grad_norm": 2.987377916531046, + "learning_rate": 1.864585734076646e-06, + "loss": 0.4967, + "step": 10644 + }, + { + "epoch": 0.4392588924651316, + "grad_norm": 7.102109889762355, + "learning_rate": 1.8643912666965029e-06, + "loss": 0.5361, + "step": 10645 + }, + { + "epoch": 0.4393001568044896, + "grad_norm": 2.5762385401209356, + "learning_rate": 1.86419679280742e-06, + "loss": 0.5002, + "step": 10646 + }, + { + "epoch": 0.4393414211438475, + "grad_norm": 8.371097225583654, + "learning_rate": 1.8640023124128718e-06, + "loss": 0.508, + "step": 10647 + }, + { + "epoch": 0.4393826854832054, + "grad_norm": 12.552432806582221, + "learning_rate": 1.8638078255163311e-06, + "loss": 0.5176, + "step": 10648 + }, + { + "epoch": 0.43942394982256333, + "grad_norm": 6.181680573958159, + "learning_rate": 1.863613332121273e-06, + "loss": 0.5223, + "step": 10649 + }, + { + "epoch": 0.43946521416192125, + "grad_norm": 2.6345910968377138, + "learning_rate": 1.863418832231171e-06, + "loss": 0.5524, + "step": 10650 + }, + { + "epoch": 0.4395064785012792, + "grad_norm": 2.590478216494355, + "learning_rate": 1.8632243258494998e-06, + "loss": 0.577, + "step": 10651 + }, + { + "epoch": 0.43954774284063713, + "grad_norm": 19.60077618550021, + "learning_rate": 1.8630298129797329e-06, + "loss": 0.5581, + "step": 10652 + }, + { + "epoch": 0.43958900717999505, + "grad_norm": 3.040870310777735, + "learning_rate": 1.862835293625346e-06, + "loss": 0.5549, + "step": 10653 + }, + { + "epoch": 0.43963027151935297, + "grad_norm": 2.9817066080431096, + "learning_rate": 1.8626407677898128e-06, + "loss": 0.5242, + "step": 10654 + }, + { + "epoch": 0.4396715358587109, + "grad_norm": 3.859358741464223, + "learning_rate": 1.8624462354766081e-06, + "loss": 0.5434, + "step": 10655 + }, + { + "epoch": 0.43971280019806885, + "grad_norm": 5.9911425804210925, + "learning_rate": 1.8622516966892068e-06, + "loss": 0.5675, + "step": 10656 + }, + { + "epoch": 0.43975406453742677, + "grad_norm": 3.327480623596692, + "learning_rate": 1.8620571514310852e-06, + "loss": 0.5726, + "step": 10657 + }, + { + "epoch": 0.4397953288767847, + "grad_norm": 2.683247404942419, + "learning_rate": 1.861862599705716e-06, + "loss": 
0.5044, + "step": 10658 + }, + { + "epoch": 0.4398365932161426, + "grad_norm": 10.359020129163108, + "learning_rate": 1.8616680415165759e-06, + "loss": 0.4868, + "step": 10659 + }, + { + "epoch": 0.4398778575555005, + "grad_norm": 3.1996406195684566, + "learning_rate": 1.8614734768671396e-06, + "loss": 0.5361, + "step": 10660 + }, + { + "epoch": 0.4399191218948585, + "grad_norm": 2.7749118954352188, + "learning_rate": 1.8612789057608828e-06, + "loss": 0.4924, + "step": 10661 + }, + { + "epoch": 0.4399603862342164, + "grad_norm": 2.755040279076545, + "learning_rate": 1.8610843282012808e-06, + "loss": 0.5391, + "step": 10662 + }, + { + "epoch": 0.4400016505735743, + "grad_norm": 6.684634382875467, + "learning_rate": 1.8608897441918099e-06, + "loss": 0.5386, + "step": 10663 + }, + { + "epoch": 0.44004291491293224, + "grad_norm": 6.3775499079370075, + "learning_rate": 1.860695153735945e-06, + "loss": 0.5193, + "step": 10664 + }, + { + "epoch": 0.44008417925229015, + "grad_norm": 4.320316820345163, + "learning_rate": 1.8605005568371626e-06, + "loss": 0.5256, + "step": 10665 + }, + { + "epoch": 0.4401254435916481, + "grad_norm": 75.3849956141445, + "learning_rate": 1.860305953498938e-06, + "loss": 0.5177, + "step": 10666 + }, + { + "epoch": 0.44016670793100604, + "grad_norm": 3.816110610221265, + "learning_rate": 1.8601113437247482e-06, + "loss": 0.5583, + "step": 10667 + }, + { + "epoch": 0.44020797227036396, + "grad_norm": 4.639303469710022, + "learning_rate": 1.8599167275180684e-06, + "loss": 0.5511, + "step": 10668 + }, + { + "epoch": 0.4402492366097219, + "grad_norm": 7.678149594822043, + "learning_rate": 1.859722104882376e-06, + "loss": 0.5475, + "step": 10669 + }, + { + "epoch": 0.4402905009490798, + "grad_norm": 2.783312439481584, + "learning_rate": 1.8595274758211463e-06, + "loss": 0.5535, + "step": 10670 + }, + { + "epoch": 0.44033176528843776, + "grad_norm": 7.35517882731071, + "learning_rate": 1.8593328403378571e-06, + "loss": 0.5442, + "step": 10671 + }, + { + "epoch": 0.4403730296277957, + "grad_norm": 6.528998188644452, + "learning_rate": 1.8591381984359844e-06, + "loss": 0.5034, + "step": 10672 + }, + { + "epoch": 0.4404142939671536, + "grad_norm": 5.439302433553587, + "learning_rate": 1.8589435501190049e-06, + "loss": 0.5269, + "step": 10673 + }, + { + "epoch": 0.4404555583065115, + "grad_norm": 6.911875945357439, + "learning_rate": 1.8587488953903955e-06, + "loss": 0.5604, + "step": 10674 + }, + { + "epoch": 0.4404968226458694, + "grad_norm": 3.800984260439657, + "learning_rate": 1.8585542342536335e-06, + "loss": 0.5767, + "step": 10675 + }, + { + "epoch": 0.44053808698522734, + "grad_norm": 1.925562745700986, + "learning_rate": 1.858359566712196e-06, + "loss": 0.5162, + "step": 10676 + }, + { + "epoch": 0.4405793513245853, + "grad_norm": 3.173127342144269, + "learning_rate": 1.8581648927695601e-06, + "loss": 0.5539, + "step": 10677 + }, + { + "epoch": 0.4406206156639432, + "grad_norm": 2.786614111260821, + "learning_rate": 1.8579702124292032e-06, + "loss": 0.5474, + "step": 10678 + }, + { + "epoch": 0.44066188000330114, + "grad_norm": 3.3589753742424047, + "learning_rate": 1.8577755256946027e-06, + "loss": 0.4996, + "step": 10679 + }, + { + "epoch": 0.44070314434265906, + "grad_norm": 5.925395090052474, + "learning_rate": 1.8575808325692364e-06, + "loss": 0.5495, + "step": 10680 + }, + { + "epoch": 0.440744408682017, + "grad_norm": 3.515617208972978, + "learning_rate": 1.8573861330565818e-06, + "loss": 0.5162, + "step": 10681 + }, + { + "epoch": 0.44078567302137495, + 
"grad_norm": 4.380265343851988, + "learning_rate": 1.857191427160117e-06, + "loss": 0.5205, + "step": 10682 + }, + { + "epoch": 0.44082693736073286, + "grad_norm": 8.702012812100609, + "learning_rate": 1.85699671488332e-06, + "loss": 0.5197, + "step": 10683 + }, + { + "epoch": 0.4408682017000908, + "grad_norm": 6.657384875486715, + "learning_rate": 1.8568019962296683e-06, + "loss": 0.5297, + "step": 10684 + }, + { + "epoch": 0.4409094660394487, + "grad_norm": 5.253258199152249, + "learning_rate": 1.85660727120264e-06, + "loss": 0.5535, + "step": 10685 + }, + { + "epoch": 0.4409507303788066, + "grad_norm": 4.39175167218374, + "learning_rate": 1.8564125398057142e-06, + "loss": 0.5054, + "step": 10686 + }, + { + "epoch": 0.4409919947181646, + "grad_norm": 3.7761229505537433, + "learning_rate": 1.8562178020423687e-06, + "loss": 0.5376, + "step": 10687 + }, + { + "epoch": 0.4410332590575225, + "grad_norm": 2.5135412664372265, + "learning_rate": 1.8560230579160822e-06, + "loss": 0.4813, + "step": 10688 + }, + { + "epoch": 0.4410745233968804, + "grad_norm": 5.380209185161278, + "learning_rate": 1.855828307430333e-06, + "loss": 0.4869, + "step": 10689 + }, + { + "epoch": 0.44111578773623833, + "grad_norm": 3.371891542671272, + "learning_rate": 1.8556335505886009e-06, + "loss": 0.5032, + "step": 10690 + }, + { + "epoch": 0.44115705207559625, + "grad_norm": 2.482588320404016, + "learning_rate": 1.855438787394363e-06, + "loss": 0.5286, + "step": 10691 + }, + { + "epoch": 0.4411983164149542, + "grad_norm": 2.3527240926815196, + "learning_rate": 1.8552440178510997e-06, + "loss": 0.6015, + "step": 10692 + }, + { + "epoch": 0.44123958075431213, + "grad_norm": 3.087582247295467, + "learning_rate": 1.855049241962289e-06, + "loss": 0.5171, + "step": 10693 + }, + { + "epoch": 0.44128084509367005, + "grad_norm": 5.516727109060138, + "learning_rate": 1.8548544597314111e-06, + "loss": 0.4977, + "step": 10694 + }, + { + "epoch": 0.44132210943302796, + "grad_norm": 6.805999643929403, + "learning_rate": 1.8546596711619446e-06, + "loss": 0.4493, + "step": 10695 + }, + { + "epoch": 0.4413633737723859, + "grad_norm": 6.199758898012319, + "learning_rate": 1.8544648762573693e-06, + "loss": 0.5358, + "step": 10696 + }, + { + "epoch": 0.44140463811174385, + "grad_norm": 2.3354066208896733, + "learning_rate": 1.8542700750211648e-06, + "loss": 0.5607, + "step": 10697 + }, + { + "epoch": 0.44144590245110177, + "grad_norm": 5.978842323579559, + "learning_rate": 1.8540752674568105e-06, + "loss": 0.5165, + "step": 10698 + }, + { + "epoch": 0.4414871667904597, + "grad_norm": 2.151317796265388, + "learning_rate": 1.8538804535677855e-06, + "loss": 0.528, + "step": 10699 + }, + { + "epoch": 0.4415284311298176, + "grad_norm": 18.949675582189528, + "learning_rate": 1.853685633357571e-06, + "loss": 0.5451, + "step": 10700 + }, + { + "epoch": 0.4415696954691755, + "grad_norm": 3.4081508884795246, + "learning_rate": 1.8534908068296461e-06, + "loss": 0.5335, + "step": 10701 + }, + { + "epoch": 0.4416109598085335, + "grad_norm": 2.301703139999511, + "learning_rate": 1.8532959739874911e-06, + "loss": 0.4412, + "step": 10702 + }, + { + "epoch": 0.4416522241478914, + "grad_norm": 3.5805082834616457, + "learning_rate": 1.8531011348345862e-06, + "loss": 0.5538, + "step": 10703 + }, + { + "epoch": 0.4416934884872493, + "grad_norm": 15.085524617152318, + "learning_rate": 1.8529062893744118e-06, + "loss": 0.5671, + "step": 10704 + }, + { + "epoch": 0.44173475282660724, + "grad_norm": 1.9235284399753845, + "learning_rate": 1.852711437610448e-06, 
+ "loss": 0.4912, + "step": 10705 + }, + { + "epoch": 0.44177601716596515, + "grad_norm": 3.387105174536081, + "learning_rate": 1.852516579546176e-06, + "loss": 0.5135, + "step": 10706 + }, + { + "epoch": 0.4418172815053231, + "grad_norm": 6.367461363843089, + "learning_rate": 1.8523217151850754e-06, + "loss": 0.5311, + "step": 10707 + }, + { + "epoch": 0.44185854584468104, + "grad_norm": 2.511325621800029, + "learning_rate": 1.8521268445306276e-06, + "loss": 0.5504, + "step": 10708 + }, + { + "epoch": 0.44189981018403895, + "grad_norm": 2.166119919593272, + "learning_rate": 1.8519319675863137e-06, + "loss": 0.5081, + "step": 10709 + }, + { + "epoch": 0.44194107452339687, + "grad_norm": 3.7306788507540363, + "learning_rate": 1.8517370843556145e-06, + "loss": 0.5135, + "step": 10710 + }, + { + "epoch": 0.4419823388627548, + "grad_norm": 3.078460338691311, + "learning_rate": 1.8515421948420115e-06, + "loss": 0.5332, + "step": 10711 + }, + { + "epoch": 0.44202360320211276, + "grad_norm": 2.2293255909808773, + "learning_rate": 1.851347299048985e-06, + "loss": 0.5132, + "step": 10712 + }, + { + "epoch": 0.4420648675414707, + "grad_norm": 3.379072156132889, + "learning_rate": 1.8511523969800168e-06, + "loss": 0.5271, + "step": 10713 + }, + { + "epoch": 0.4421061318808286, + "grad_norm": 4.282108869713928, + "learning_rate": 1.8509574886385886e-06, + "loss": 0.5021, + "step": 10714 + }, + { + "epoch": 0.4421473962201865, + "grad_norm": 4.011998093749092, + "learning_rate": 1.8507625740281811e-06, + "loss": 0.505, + "step": 10715 + }, + { + "epoch": 0.4421886605595444, + "grad_norm": 2.080108963920599, + "learning_rate": 1.850567653152277e-06, + "loss": 0.5433, + "step": 10716 + }, + { + "epoch": 0.4422299248989024, + "grad_norm": 3.1254926662019664, + "learning_rate": 1.850372726014358e-06, + "loss": 0.5706, + "step": 10717 + }, + { + "epoch": 0.4422711892382603, + "grad_norm": 4.069547717592624, + "learning_rate": 1.8501777926179048e-06, + "loss": 0.5157, + "step": 10718 + }, + { + "epoch": 0.4423124535776182, + "grad_norm": 2.6793564018393994, + "learning_rate": 1.8499828529664008e-06, + "loss": 0.5844, + "step": 10719 + }, + { + "epoch": 0.44235371791697614, + "grad_norm": 2.91439731430025, + "learning_rate": 1.849787907063327e-06, + "loss": 0.5327, + "step": 10720 + }, + { + "epoch": 0.44239498225633406, + "grad_norm": 2.9950452417454354, + "learning_rate": 1.8495929549121667e-06, + "loss": 0.5311, + "step": 10721 + }, + { + "epoch": 0.44243624659569203, + "grad_norm": 2.413541558162656, + "learning_rate": 1.8493979965164014e-06, + "loss": 0.5463, + "step": 10722 + }, + { + "epoch": 0.44247751093504994, + "grad_norm": 2.568003138532834, + "learning_rate": 1.8492030318795144e-06, + "loss": 0.5597, + "step": 10723 + }, + { + "epoch": 0.44251877527440786, + "grad_norm": 2.8280513568920274, + "learning_rate": 1.8490080610049868e-06, + "loss": 0.502, + "step": 10724 + }, + { + "epoch": 0.4425600396137658, + "grad_norm": 2.146384796096256, + "learning_rate": 1.8488130838963026e-06, + "loss": 0.4706, + "step": 10725 + }, + { + "epoch": 0.4426013039531237, + "grad_norm": 2.9598563953966823, + "learning_rate": 1.8486181005569438e-06, + "loss": 0.5622, + "step": 10726 + }, + { + "epoch": 0.44264256829248166, + "grad_norm": 2.7524448086147166, + "learning_rate": 1.848423110990394e-06, + "loss": 0.5457, + "step": 10727 + }, + { + "epoch": 0.4426838326318396, + "grad_norm": 2.546759960180899, + "learning_rate": 1.8482281152001354e-06, + "loss": 0.5237, + "step": 10728 + }, + { + "epoch": 
0.4427250969711975, + "grad_norm": 2.419633387537525, + "learning_rate": 1.848033113189652e-06, + "loss": 0.5165, + "step": 10729 + }, + { + "epoch": 0.4427663613105554, + "grad_norm": 3.2390490024072336, + "learning_rate": 1.8478381049624264e-06, + "loss": 0.54, + "step": 10730 + }, + { + "epoch": 0.44280762564991333, + "grad_norm": 15.908134321152477, + "learning_rate": 1.847643090521942e-06, + "loss": 0.5601, + "step": 10731 + }, + { + "epoch": 0.4428488899892713, + "grad_norm": 4.963751617640989, + "learning_rate": 1.8474480698716823e-06, + "loss": 0.6043, + "step": 10732 + }, + { + "epoch": 0.4428901543286292, + "grad_norm": 4.1847475370084215, + "learning_rate": 1.847253043015131e-06, + "loss": 0.5331, + "step": 10733 + }, + { + "epoch": 0.44293141866798713, + "grad_norm": 3.725042068884233, + "learning_rate": 1.8470580099557715e-06, + "loss": 0.5301, + "step": 10734 + }, + { + "epoch": 0.44297268300734505, + "grad_norm": 2.435789433619693, + "learning_rate": 1.8468629706970878e-06, + "loss": 0.5103, + "step": 10735 + }, + { + "epoch": 0.44301394734670296, + "grad_norm": 4.459655537109962, + "learning_rate": 1.846667925242564e-06, + "loss": 0.4722, + "step": 10736 + }, + { + "epoch": 0.4430552116860609, + "grad_norm": 9.362661842562263, + "learning_rate": 1.8464728735956837e-06, + "loss": 0.5036, + "step": 10737 + }, + { + "epoch": 0.44309647602541885, + "grad_norm": 2.8410516010365443, + "learning_rate": 1.846277815759931e-06, + "loss": 0.5242, + "step": 10738 + }, + { + "epoch": 0.44313774036477677, + "grad_norm": 3.656344662201264, + "learning_rate": 1.8460827517387902e-06, + "loss": 0.511, + "step": 10739 + }, + { + "epoch": 0.4431790047041347, + "grad_norm": 3.415156956255487, + "learning_rate": 1.8458876815357457e-06, + "loss": 0.5106, + "step": 10740 + }, + { + "epoch": 0.4432202690434926, + "grad_norm": 9.822147047561563, + "learning_rate": 1.8456926051542821e-06, + "loss": 0.4808, + "step": 10741 + }, + { + "epoch": 0.4432615333828505, + "grad_norm": 3.7535428831300166, + "learning_rate": 1.8454975225978836e-06, + "loss": 0.5311, + "step": 10742 + }, + { + "epoch": 0.4433027977222085, + "grad_norm": 2.4112840795241266, + "learning_rate": 1.8453024338700356e-06, + "loss": 0.4897, + "step": 10743 + }, + { + "epoch": 0.4433440620615664, + "grad_norm": 4.638415771528004, + "learning_rate": 1.8451073389742214e-06, + "loss": 0.5244, + "step": 10744 + }, + { + "epoch": 0.4433853264009243, + "grad_norm": 2.9721442245632885, + "learning_rate": 1.8449122379139275e-06, + "loss": 0.5183, + "step": 10745 + }, + { + "epoch": 0.44342659074028223, + "grad_norm": 2.4414976068647523, + "learning_rate": 1.8447171306926379e-06, + "loss": 0.5037, + "step": 10746 + }, + { + "epoch": 0.44346785507964015, + "grad_norm": 5.19901968671669, + "learning_rate": 1.8445220173138378e-06, + "loss": 0.5553, + "step": 10747 + }, + { + "epoch": 0.4435091194189981, + "grad_norm": 2.6663913460519426, + "learning_rate": 1.8443268977810126e-06, + "loss": 0.5545, + "step": 10748 + }, + { + "epoch": 0.44355038375835604, + "grad_norm": 4.614632546100244, + "learning_rate": 1.8441317720976482e-06, + "loss": 0.5363, + "step": 10749 + }, + { + "epoch": 0.44359164809771395, + "grad_norm": 3.6740870242895243, + "learning_rate": 1.8439366402672291e-06, + "loss": 0.5427, + "step": 10750 + }, + { + "epoch": 0.44363291243707187, + "grad_norm": 9.228269084653643, + "learning_rate": 1.8437415022932415e-06, + "loss": 0.5661, + "step": 10751 + }, + { + "epoch": 0.4436741767764298, + "grad_norm": 2.848861202173084, + 
"learning_rate": 1.8435463581791704e-06, + "loss": 0.522, + "step": 10752 + }, + { + "epoch": 0.44371544111578776, + "grad_norm": 2.61253599109179, + "learning_rate": 1.843351207928502e-06, + "loss": 0.4846, + "step": 10753 + }, + { + "epoch": 0.4437567054551457, + "grad_norm": 5.484814159718974, + "learning_rate": 1.843156051544722e-06, + "loss": 0.5348, + "step": 10754 + }, + { + "epoch": 0.4437979697945036, + "grad_norm": 4.255180056784701, + "learning_rate": 1.8429608890313166e-06, + "loss": 0.561, + "step": 10755 + }, + { + "epoch": 0.4438392341338615, + "grad_norm": 2.878908363463462, + "learning_rate": 1.842765720391772e-06, + "loss": 0.5358, + "step": 10756 + }, + { + "epoch": 0.4438804984732194, + "grad_norm": 3.124952417360722, + "learning_rate": 1.8425705456295737e-06, + "loss": 0.5258, + "step": 10757 + }, + { + "epoch": 0.4439217628125774, + "grad_norm": 3.284972702218809, + "learning_rate": 1.8423753647482087e-06, + "loss": 0.564, + "step": 10758 + }, + { + "epoch": 0.4439630271519353, + "grad_norm": 2.33358767763415, + "learning_rate": 1.842180177751163e-06, + "loss": 0.4785, + "step": 10759 + }, + { + "epoch": 0.4440042914912932, + "grad_norm": 3.535706149504319, + "learning_rate": 1.8419849846419238e-06, + "loss": 0.4845, + "step": 10760 + }, + { + "epoch": 0.44404555583065114, + "grad_norm": 2.9995381644142194, + "learning_rate": 1.841789785423977e-06, + "loss": 0.5214, + "step": 10761 + }, + { + "epoch": 0.44408682017000906, + "grad_norm": 2.224843160660056, + "learning_rate": 1.8415945801008093e-06, + "loss": 0.5176, + "step": 10762 + }, + { + "epoch": 0.444128084509367, + "grad_norm": 4.326561432351867, + "learning_rate": 1.8413993686759085e-06, + "loss": 0.4863, + "step": 10763 + }, + { + "epoch": 0.44416934884872494, + "grad_norm": 4.8500692379545045, + "learning_rate": 1.8412041511527607e-06, + "loss": 0.5739, + "step": 10764 + }, + { + "epoch": 0.44421061318808286, + "grad_norm": 2.665148662213262, + "learning_rate": 1.841008927534853e-06, + "loss": 0.509, + "step": 10765 + }, + { + "epoch": 0.4442518775274408, + "grad_norm": 3.821371614342273, + "learning_rate": 1.8408136978256725e-06, + "loss": 0.5412, + "step": 10766 + }, + { + "epoch": 0.4442931418667987, + "grad_norm": 15.43205942335223, + "learning_rate": 1.840618462028707e-06, + "loss": 0.5362, + "step": 10767 + }, + { + "epoch": 0.44433440620615666, + "grad_norm": 1.9751764292819374, + "learning_rate": 1.840423220147444e-06, + "loss": 0.5472, + "step": 10768 + }, + { + "epoch": 0.4443756705455146, + "grad_norm": 2.1262486379784167, + "learning_rate": 1.8402279721853706e-06, + "loss": 0.4897, + "step": 10769 + }, + { + "epoch": 0.4444169348848725, + "grad_norm": 3.8460017379830775, + "learning_rate": 1.8400327181459743e-06, + "loss": 0.5329, + "step": 10770 + }, + { + "epoch": 0.4444581992242304, + "grad_norm": 7.3084630429532655, + "learning_rate": 1.8398374580327432e-06, + "loss": 0.5336, + "step": 10771 + }, + { + "epoch": 0.4444994635635883, + "grad_norm": 3.1297430520427723, + "learning_rate": 1.8396421918491647e-06, + "loss": 0.5052, + "step": 10772 + }, + { + "epoch": 0.4445407279029463, + "grad_norm": 2.8633840650306093, + "learning_rate": 1.839446919598727e-06, + "loss": 0.5344, + "step": 10773 + }, + { + "epoch": 0.4445819922423042, + "grad_norm": 3.0096925415056823, + "learning_rate": 1.8392516412849183e-06, + "loss": 0.5259, + "step": 10774 + }, + { + "epoch": 0.44462325658166213, + "grad_norm": 3.343688725164626, + "learning_rate": 1.8390563569112264e-06, + "loss": 0.5634, + "step": 10775 + }, + 
{ + "epoch": 0.44466452092102005, + "grad_norm": 4.541836012522897, + "learning_rate": 1.8388610664811404e-06, + "loss": 0.4756, + "step": 10776 + }, + { + "epoch": 0.44470578526037796, + "grad_norm": 7.638705423948815, + "learning_rate": 1.8386657699981477e-06, + "loss": 0.5361, + "step": 10777 + }, + { + "epoch": 0.44474704959973593, + "grad_norm": 3.6773258044122734, + "learning_rate": 1.838470467465737e-06, + "loss": 0.5899, + "step": 10778 + }, + { + "epoch": 0.44478831393909385, + "grad_norm": 2.702561491445036, + "learning_rate": 1.8382751588873972e-06, + "loss": 0.5048, + "step": 10779 + }, + { + "epoch": 0.44482957827845176, + "grad_norm": 3.5528821372679555, + "learning_rate": 1.8380798442666165e-06, + "loss": 0.5235, + "step": 10780 + }, + { + "epoch": 0.4448708426178097, + "grad_norm": 2.763027302976038, + "learning_rate": 1.8378845236068842e-06, + "loss": 0.5374, + "step": 10781 + }, + { + "epoch": 0.4449121069571676, + "grad_norm": 2.4010208050693786, + "learning_rate": 1.8376891969116895e-06, + "loss": 0.493, + "step": 10782 + }, + { + "epoch": 0.44495337129652557, + "grad_norm": 7.579556329678726, + "learning_rate": 1.8374938641845206e-06, + "loss": 0.5675, + "step": 10783 + }, + { + "epoch": 0.4449946356358835, + "grad_norm": 6.307942704009519, + "learning_rate": 1.837298525428867e-06, + "loss": 0.4763, + "step": 10784 + }, + { + "epoch": 0.4450358999752414, + "grad_norm": 2.6857970028321168, + "learning_rate": 1.8371031806482176e-06, + "loss": 0.5257, + "step": 10785 + }, + { + "epoch": 0.4450771643145993, + "grad_norm": 3.5587839819262257, + "learning_rate": 1.8369078298460625e-06, + "loss": 0.556, + "step": 10786 + }, + { + "epoch": 0.44511842865395723, + "grad_norm": 5.190818796794204, + "learning_rate": 1.8367124730258904e-06, + "loss": 0.5408, + "step": 10787 + }, + { + "epoch": 0.4451596929933152, + "grad_norm": 2.7263498073975634, + "learning_rate": 1.8365171101911918e-06, + "loss": 0.5318, + "step": 10788 + }, + { + "epoch": 0.4452009573326731, + "grad_norm": 5.722795525353455, + "learning_rate": 1.8363217413454557e-06, + "loss": 0.5292, + "step": 10789 + }, + { + "epoch": 0.44524222167203104, + "grad_norm": 13.758470645609835, + "learning_rate": 1.8361263664921717e-06, + "loss": 0.528, + "step": 10790 + }, + { + "epoch": 0.44528348601138895, + "grad_norm": 3.733802570382449, + "learning_rate": 1.8359309856348297e-06, + "loss": 0.5322, + "step": 10791 + }, + { + "epoch": 0.44532475035074687, + "grad_norm": 4.105649883204394, + "learning_rate": 1.8357355987769202e-06, + "loss": 0.5432, + "step": 10792 + }, + { + "epoch": 0.44536601469010484, + "grad_norm": 3.0295090753885687, + "learning_rate": 1.8355402059219328e-06, + "loss": 0.5184, + "step": 10793 + }, + { + "epoch": 0.44540727902946275, + "grad_norm": 4.431088209933915, + "learning_rate": 1.8353448070733584e-06, + "loss": 0.554, + "step": 10794 + }, + { + "epoch": 0.44544854336882067, + "grad_norm": 2.6353450118199464, + "learning_rate": 1.8351494022346864e-06, + "loss": 0.4586, + "step": 10795 + }, + { + "epoch": 0.4454898077081786, + "grad_norm": 4.343619384882942, + "learning_rate": 1.8349539914094077e-06, + "loss": 0.515, + "step": 10796 + }, + { + "epoch": 0.4455310720475365, + "grad_norm": 5.820496825092964, + "learning_rate": 1.8347585746010131e-06, + "loss": 0.5735, + "step": 10797 + }, + { + "epoch": 0.4455723363868944, + "grad_norm": 3.643576669933425, + "learning_rate": 1.8345631518129924e-06, + "loss": 0.4781, + "step": 10798 + }, + { + "epoch": 0.4456136007262524, + "grad_norm": 5.454130151266026, 
+ "learning_rate": 1.8343677230488375e-06, + "loss": 0.5236, + "step": 10799 + }, + { + "epoch": 0.4456548650656103, + "grad_norm": 3.3241570381022254, + "learning_rate": 1.8341722883120383e-06, + "loss": 0.5281, + "step": 10800 + }, + { + "epoch": 0.4456961294049682, + "grad_norm": 5.507290723639353, + "learning_rate": 1.8339768476060859e-06, + "loss": 0.5709, + "step": 10801 + }, + { + "epoch": 0.44573739374432614, + "grad_norm": 3.905910137015945, + "learning_rate": 1.8337814009344715e-06, + "loss": 0.5377, + "step": 10802 + }, + { + "epoch": 0.44577865808368405, + "grad_norm": 3.86913898804442, + "learning_rate": 1.833585948300687e-06, + "loss": 0.5504, + "step": 10803 + }, + { + "epoch": 0.445819922423042, + "grad_norm": 6.714000623802749, + "learning_rate": 1.8333904897082218e-06, + "loss": 0.5329, + "step": 10804 + }, + { + "epoch": 0.44586118676239994, + "grad_norm": 9.836078895477248, + "learning_rate": 1.8331950251605692e-06, + "loss": 0.5464, + "step": 10805 + }, + { + "epoch": 0.44590245110175786, + "grad_norm": 6.08384029114442, + "learning_rate": 1.83299955466122e-06, + "loss": 0.5705, + "step": 10806 + }, + { + "epoch": 0.4459437154411158, + "grad_norm": 3.173132452347083, + "learning_rate": 1.8328040782136656e-06, + "loss": 0.5071, + "step": 10807 + }, + { + "epoch": 0.4459849797804737, + "grad_norm": 4.242117718302245, + "learning_rate": 1.8326085958213975e-06, + "loss": 0.5061, + "step": 10808 + }, + { + "epoch": 0.44602624411983166, + "grad_norm": 2.9137606847399304, + "learning_rate": 1.8324131074879082e-06, + "loss": 0.5576, + "step": 10809 + }, + { + "epoch": 0.4460675084591896, + "grad_norm": 6.733462429909611, + "learning_rate": 1.8322176132166889e-06, + "loss": 0.529, + "step": 10810 + }, + { + "epoch": 0.4461087727985475, + "grad_norm": 3.5227774631384814, + "learning_rate": 1.8320221130112321e-06, + "loss": 0.4758, + "step": 10811 + }, + { + "epoch": 0.4461500371379054, + "grad_norm": 3.8945983840426313, + "learning_rate": 1.8318266068750296e-06, + "loss": 0.5603, + "step": 10812 + }, + { + "epoch": 0.4461913014772633, + "grad_norm": 11.887766483104823, + "learning_rate": 1.8316310948115741e-06, + "loss": 0.5725, + "step": 10813 + }, + { + "epoch": 0.4462325658166213, + "grad_norm": 3.7104180470757337, + "learning_rate": 1.8314355768243573e-06, + "loss": 0.5415, + "step": 10814 + }, + { + "epoch": 0.4462738301559792, + "grad_norm": 2.9978954336376535, + "learning_rate": 1.8312400529168727e-06, + "loss": 0.5154, + "step": 10815 + }, + { + "epoch": 0.44631509449533713, + "grad_norm": 3.8133108568753213, + "learning_rate": 1.8310445230926114e-06, + "loss": 0.5385, + "step": 10816 + }, + { + "epoch": 0.44635635883469504, + "grad_norm": 3.0736921422467818, + "learning_rate": 1.830848987355067e-06, + "loss": 0.5316, + "step": 10817 + }, + { + "epoch": 0.44639762317405296, + "grad_norm": 4.199263689799268, + "learning_rate": 1.8306534457077317e-06, + "loss": 0.5168, + "step": 10818 + }, + { + "epoch": 0.44643888751341093, + "grad_norm": 3.423880676778083, + "learning_rate": 1.8304578981540993e-06, + "loss": 0.5564, + "step": 10819 + }, + { + "epoch": 0.44648015185276885, + "grad_norm": 5.198818115953347, + "learning_rate": 1.8302623446976617e-06, + "loss": 0.5047, + "step": 10820 + }, + { + "epoch": 0.44652141619212676, + "grad_norm": 3.3033793489116414, + "learning_rate": 1.8300667853419126e-06, + "loss": 0.5788, + "step": 10821 + }, + { + "epoch": 0.4465626805314847, + "grad_norm": 2.4511319076356632, + "learning_rate": 1.8298712200903452e-06, + "loss": 0.5156, + 
"step": 10822 + }, + { + "epoch": 0.4466039448708426, + "grad_norm": 2.6590379348572735, + "learning_rate": 1.8296756489464524e-06, + "loss": 0.5482, + "step": 10823 + }, + { + "epoch": 0.44664520921020057, + "grad_norm": 6.74109013244823, + "learning_rate": 1.8294800719137275e-06, + "loss": 0.5269, + "step": 10824 + }, + { + "epoch": 0.4466864735495585, + "grad_norm": 2.850085762756498, + "learning_rate": 1.8292844889956648e-06, + "loss": 0.5009, + "step": 10825 + }, + { + "epoch": 0.4467277378889164, + "grad_norm": 2.972789860736984, + "learning_rate": 1.829088900195757e-06, + "loss": 0.4672, + "step": 10826 + }, + { + "epoch": 0.4467690022282743, + "grad_norm": 2.0168751457255416, + "learning_rate": 1.8288933055174985e-06, + "loss": 0.5372, + "step": 10827 + }, + { + "epoch": 0.44681026656763223, + "grad_norm": 3.974200497590321, + "learning_rate": 1.8286977049643827e-06, + "loss": 0.6107, + "step": 10828 + }, + { + "epoch": 0.4468515309069902, + "grad_norm": 8.52585243759402, + "learning_rate": 1.8285020985399036e-06, + "loss": 0.5299, + "step": 10829 + }, + { + "epoch": 0.4468927952463481, + "grad_norm": 5.055459908515184, + "learning_rate": 1.8283064862475549e-06, + "loss": 0.5388, + "step": 10830 + }, + { + "epoch": 0.44693405958570603, + "grad_norm": 3.9539292238560795, + "learning_rate": 1.8281108680908315e-06, + "loss": 0.5176, + "step": 10831 + }, + { + "epoch": 0.44697532392506395, + "grad_norm": 2.5068181193663492, + "learning_rate": 1.827915244073227e-06, + "loss": 0.5679, + "step": 10832 + }, + { + "epoch": 0.44701658826442187, + "grad_norm": 2.9790723374202375, + "learning_rate": 1.8277196141982359e-06, + "loss": 0.5428, + "step": 10833 + }, + { + "epoch": 0.44705785260377984, + "grad_norm": 2.4554330499748014, + "learning_rate": 1.8275239784693526e-06, + "loss": 0.5159, + "step": 10834 + }, + { + "epoch": 0.44709911694313775, + "grad_norm": 2.613946520933606, + "learning_rate": 1.8273283368900717e-06, + "loss": 0.5186, + "step": 10835 + }, + { + "epoch": 0.44714038128249567, + "grad_norm": 2.627755618199563, + "learning_rate": 1.8271326894638885e-06, + "loss": 0.5373, + "step": 10836 + }, + { + "epoch": 0.4471816456218536, + "grad_norm": 12.91985137685354, + "learning_rate": 1.8269370361942961e-06, + "loss": 0.5426, + "step": 10837 + }, + { + "epoch": 0.4472229099612115, + "grad_norm": 3.4783975570278036, + "learning_rate": 1.8267413770847913e-06, + "loss": 0.553, + "step": 10838 + }, + { + "epoch": 0.4472641743005695, + "grad_norm": 3.256886575500173, + "learning_rate": 1.8265457121388675e-06, + "loss": 0.5961, + "step": 10839 + }, + { + "epoch": 0.4473054386399274, + "grad_norm": 1.975060150947496, + "learning_rate": 1.8263500413600205e-06, + "loss": 0.4854, + "step": 10840 + }, + { + "epoch": 0.4473467029792853, + "grad_norm": 6.964960252230649, + "learning_rate": 1.8261543647517454e-06, + "loss": 0.5142, + "step": 10841 + }, + { + "epoch": 0.4473879673186432, + "grad_norm": 9.352387751977394, + "learning_rate": 1.8259586823175374e-06, + "loss": 0.5198, + "step": 10842 + }, + { + "epoch": 0.44742923165800114, + "grad_norm": 5.7944537318171045, + "learning_rate": 1.8257629940608917e-06, + "loss": 0.5584, + "step": 10843 + }, + { + "epoch": 0.4474704959973591, + "grad_norm": 4.121749645427395, + "learning_rate": 1.8255672999853045e-06, + "loss": 0.4693, + "step": 10844 + }, + { + "epoch": 0.447511760336717, + "grad_norm": 22.00723816084541, + "learning_rate": 1.8253716000942703e-06, + "loss": 0.5278, + "step": 10845 + }, + { + "epoch": 0.44755302467607494, + "grad_norm": 
3.0589291301541266, + "learning_rate": 1.8251758943912862e-06, + "loss": 0.5068, + "step": 10846 + }, + { + "epoch": 0.44759428901543286, + "grad_norm": 4.402610182795723, + "learning_rate": 1.824980182879846e-06, + "loss": 0.5911, + "step": 10847 + }, + { + "epoch": 0.44763555335479077, + "grad_norm": 2.588846805556192, + "learning_rate": 1.8247844655634482e-06, + "loss": 0.4948, + "step": 10848 + }, + { + "epoch": 0.44767681769414874, + "grad_norm": 10.350323276878425, + "learning_rate": 1.8245887424455862e-06, + "loss": 0.5072, + "step": 10849 + }, + { + "epoch": 0.44771808203350666, + "grad_norm": 3.717482258994859, + "learning_rate": 1.8243930135297577e-06, + "loss": 0.5423, + "step": 10850 + }, + { + "epoch": 0.4477593463728646, + "grad_norm": 3.4461714270597525, + "learning_rate": 1.8241972788194581e-06, + "loss": 0.4886, + "step": 10851 + }, + { + "epoch": 0.4478006107122225, + "grad_norm": 2.737190367683454, + "learning_rate": 1.8240015383181844e-06, + "loss": 0.5438, + "step": 10852 + }, + { + "epoch": 0.4478418750515804, + "grad_norm": 11.760917936370353, + "learning_rate": 1.8238057920294328e-06, + "loss": 0.5675, + "step": 10853 + }, + { + "epoch": 0.4478831393909384, + "grad_norm": 3.532215041069159, + "learning_rate": 1.8236100399566997e-06, + "loss": 0.5543, + "step": 10854 + }, + { + "epoch": 0.4479244037302963, + "grad_norm": 2.1967960365618207, + "learning_rate": 1.8234142821034815e-06, + "loss": 0.5024, + "step": 10855 + }, + { + "epoch": 0.4479656680696542, + "grad_norm": 3.515869495054739, + "learning_rate": 1.8232185184732756e-06, + "loss": 0.5706, + "step": 10856 + }, + { + "epoch": 0.4480069324090121, + "grad_norm": 6.734472464687587, + "learning_rate": 1.8230227490695778e-06, + "loss": 0.5281, + "step": 10857 + }, + { + "epoch": 0.44804819674837004, + "grad_norm": 2.502631307235123, + "learning_rate": 1.822826973895886e-06, + "loss": 0.4728, + "step": 10858 + }, + { + "epoch": 0.44808946108772796, + "grad_norm": 3.648354304751374, + "learning_rate": 1.8226311929556967e-06, + "loss": 0.5223, + "step": 10859 + }, + { + "epoch": 0.44813072542708593, + "grad_norm": 10.076190962334621, + "learning_rate": 1.8224354062525073e-06, + "loss": 0.5061, + "step": 10860 + }, + { + "epoch": 0.44817198976644385, + "grad_norm": 1.9768482875588043, + "learning_rate": 1.822239613789815e-06, + "loss": 0.554, + "step": 10861 + }, + { + "epoch": 0.44821325410580176, + "grad_norm": 3.0890736597025565, + "learning_rate": 1.8220438155711172e-06, + "loss": 0.5066, + "step": 10862 + }, + { + "epoch": 0.4482545184451597, + "grad_norm": 6.794041065815296, + "learning_rate": 1.8218480115999107e-06, + "loss": 0.5155, + "step": 10863 + }, + { + "epoch": 0.4482957827845176, + "grad_norm": 4.317528182185266, + "learning_rate": 1.8216522018796941e-06, + "loss": 0.532, + "step": 10864 + }, + { + "epoch": 0.44833704712387557, + "grad_norm": 16.77474271848415, + "learning_rate": 1.8214563864139639e-06, + "loss": 0.576, + "step": 10865 + }, + { + "epoch": 0.4483783114632335, + "grad_norm": 3.6250006364412948, + "learning_rate": 1.821260565206219e-06, + "loss": 0.532, + "step": 10866 + }, + { + "epoch": 0.4484195758025914, + "grad_norm": 9.054471470646455, + "learning_rate": 1.8210647382599565e-06, + "loss": 0.5445, + "step": 10867 + }, + { + "epoch": 0.4484608401419493, + "grad_norm": 29.15351738800423, + "learning_rate": 1.8208689055786753e-06, + "loss": 0.5593, + "step": 10868 + }, + { + "epoch": 0.44850210448130723, + "grad_norm": 2.559452640022817, + "learning_rate": 1.8206730671658717e-06, + 
"loss": 0.5284, + "step": 10869 + }, + { + "epoch": 0.4485433688206652, + "grad_norm": 4.3307778724792225, + "learning_rate": 1.8204772230250455e-06, + "loss": 0.5173, + "step": 10870 + }, + { + "epoch": 0.4485846331600231, + "grad_norm": 5.40770637076082, + "learning_rate": 1.8202813731596941e-06, + "loss": 0.5597, + "step": 10871 + }, + { + "epoch": 0.44862589749938103, + "grad_norm": 2.84253508638689, + "learning_rate": 1.8200855175733166e-06, + "loss": 0.5635, + "step": 10872 + }, + { + "epoch": 0.44866716183873895, + "grad_norm": 3.6192627803158377, + "learning_rate": 1.8198896562694104e-06, + "loss": 0.5034, + "step": 10873 + }, + { + "epoch": 0.44870842617809686, + "grad_norm": 2.3868675112244437, + "learning_rate": 1.8196937892514753e-06, + "loss": 0.5541, + "step": 10874 + }, + { + "epoch": 0.44874969051745484, + "grad_norm": 4.064471905896618, + "learning_rate": 1.8194979165230091e-06, + "loss": 0.4959, + "step": 10875 + }, + { + "epoch": 0.44879095485681275, + "grad_norm": 2.734375204535771, + "learning_rate": 1.8193020380875107e-06, + "loss": 0.5202, + "step": 10876 + }, + { + "epoch": 0.44883221919617067, + "grad_norm": 3.5167969850821614, + "learning_rate": 1.8191061539484794e-06, + "loss": 0.4981, + "step": 10877 + }, + { + "epoch": 0.4488734835355286, + "grad_norm": 2.3957195553062216, + "learning_rate": 1.818910264109414e-06, + "loss": 0.5266, + "step": 10878 + }, + { + "epoch": 0.4489147478748865, + "grad_norm": 4.55251297973611, + "learning_rate": 1.818714368573813e-06, + "loss": 0.5549, + "step": 10879 + }, + { + "epoch": 0.44895601221424447, + "grad_norm": 3.460989858351945, + "learning_rate": 1.8185184673451763e-06, + "loss": 0.4953, + "step": 10880 + }, + { + "epoch": 0.4489972765536024, + "grad_norm": 2.344514285298912, + "learning_rate": 1.8183225604270032e-06, + "loss": 0.4828, + "step": 10881 + }, + { + "epoch": 0.4490385408929603, + "grad_norm": 9.255307855131804, + "learning_rate": 1.8181266478227928e-06, + "loss": 0.5256, + "step": 10882 + }, + { + "epoch": 0.4490798052323182, + "grad_norm": 5.015394916365642, + "learning_rate": 1.8179307295360443e-06, + "loss": 0.5783, + "step": 10883 + }, + { + "epoch": 0.44912106957167613, + "grad_norm": 21.35044443252898, + "learning_rate": 1.8177348055702577e-06, + "loss": 0.5394, + "step": 10884 + }, + { + "epoch": 0.4491623339110341, + "grad_norm": 4.060934893539081, + "learning_rate": 1.817538875928933e-06, + "loss": 0.5813, + "step": 10885 + }, + { + "epoch": 0.449203598250392, + "grad_norm": 5.43878842312928, + "learning_rate": 1.817342940615569e-06, + "loss": 0.4922, + "step": 10886 + }, + { + "epoch": 0.44924486258974994, + "grad_norm": 2.3603044954399044, + "learning_rate": 1.817146999633667e-06, + "loss": 0.5301, + "step": 10887 + }, + { + "epoch": 0.44928612692910785, + "grad_norm": 3.0026762204270603, + "learning_rate": 1.8169510529867257e-06, + "loss": 0.5172, + "step": 10888 + }, + { + "epoch": 0.44932739126846577, + "grad_norm": 3.364554695061171, + "learning_rate": 1.8167551006782458e-06, + "loss": 0.5935, + "step": 10889 + }, + { + "epoch": 0.44936865560782374, + "grad_norm": 3.4876679748046926, + "learning_rate": 1.8165591427117272e-06, + "loss": 0.5538, + "step": 10890 + }, + { + "epoch": 0.44940991994718166, + "grad_norm": 3.57751617668692, + "learning_rate": 1.8163631790906709e-06, + "loss": 0.5126, + "step": 10891 + }, + { + "epoch": 0.4494511842865396, + "grad_norm": 2.7094839511654154, + "learning_rate": 1.8161672098185765e-06, + "loss": 0.4927, + "step": 10892 + }, + { + "epoch": 
0.4494924486258975, + "grad_norm": 2.4063248533951396, + "learning_rate": 1.8159712348989447e-06, + "loss": 0.5719, + "step": 10893 + }, + { + "epoch": 0.4495337129652554, + "grad_norm": 6.383892336125751, + "learning_rate": 1.8157752543352767e-06, + "loss": 0.5673, + "step": 10894 + }, + { + "epoch": 0.4495749773046134, + "grad_norm": 2.2175828570329283, + "learning_rate": 1.8155792681310724e-06, + "loss": 0.5909, + "step": 10895 + }, + { + "epoch": 0.4496162416439713, + "grad_norm": 5.4172744511725375, + "learning_rate": 1.8153832762898326e-06, + "loss": 0.5356, + "step": 10896 + }, + { + "epoch": 0.4496575059833292, + "grad_norm": 3.3856781923007526, + "learning_rate": 1.8151872788150585e-06, + "loss": 0.521, + "step": 10897 + }, + { + "epoch": 0.4496987703226871, + "grad_norm": 9.91242341458858, + "learning_rate": 1.8149912757102513e-06, + "loss": 0.5727, + "step": 10898 + }, + { + "epoch": 0.44974003466204504, + "grad_norm": 5.137920368051289, + "learning_rate": 1.8147952669789123e-06, + "loss": 0.5087, + "step": 10899 + }, + { + "epoch": 0.449781299001403, + "grad_norm": 3.261432932128427, + "learning_rate": 1.814599252624542e-06, + "loss": 0.5394, + "step": 10900 + }, + { + "epoch": 0.44982256334076093, + "grad_norm": 3.02136358681081, + "learning_rate": 1.8144032326506422e-06, + "loss": 0.5471, + "step": 10901 + }, + { + "epoch": 0.44986382768011884, + "grad_norm": 7.656096623135562, + "learning_rate": 1.814207207060714e-06, + "loss": 0.569, + "step": 10902 + }, + { + "epoch": 0.44990509201947676, + "grad_norm": 3.897587147679126, + "learning_rate": 1.814011175858259e-06, + "loss": 0.5771, + "step": 10903 + }, + { + "epoch": 0.4499463563588347, + "grad_norm": 6.927935002131582, + "learning_rate": 1.813815139046779e-06, + "loss": 0.5378, + "step": 10904 + }, + { + "epoch": 0.44998762069819265, + "grad_norm": 14.397279329719185, + "learning_rate": 1.8136190966297757e-06, + "loss": 0.5374, + "step": 10905 + }, + { + "epoch": 0.45002888503755056, + "grad_norm": 10.325684343135494, + "learning_rate": 1.8134230486107502e-06, + "loss": 0.5025, + "step": 10906 + }, + { + "epoch": 0.4500701493769085, + "grad_norm": 3.5708854360830045, + "learning_rate": 1.8132269949932056e-06, + "loss": 0.5667, + "step": 10907 + }, + { + "epoch": 0.4501114137162664, + "grad_norm": 2.4976446604197076, + "learning_rate": 1.813030935780643e-06, + "loss": 0.5502, + "step": 10908 + }, + { + "epoch": 0.4501526780556243, + "grad_norm": 16.013760589636068, + "learning_rate": 1.8128348709765651e-06, + "loss": 0.6168, + "step": 10909 + }, + { + "epoch": 0.4501939423949823, + "grad_norm": 4.6178228051622705, + "learning_rate": 1.8126388005844732e-06, + "loss": 0.4999, + "step": 10910 + }, + { + "epoch": 0.4502352067343402, + "grad_norm": 19.803418845303803, + "learning_rate": 1.8124427246078708e-06, + "loss": 0.5482, + "step": 10911 + }, + { + "epoch": 0.4502764710736981, + "grad_norm": 3.9108650294790004, + "learning_rate": 1.8122466430502594e-06, + "loss": 0.514, + "step": 10912 + }, + { + "epoch": 0.45031773541305603, + "grad_norm": 5.449559806597961, + "learning_rate": 1.8120505559151423e-06, + "loss": 0.5472, + "step": 10913 + }, + { + "epoch": 0.45035899975241395, + "grad_norm": 7.408743589497658, + "learning_rate": 1.8118544632060213e-06, + "loss": 0.5456, + "step": 10914 + }, + { + "epoch": 0.4504002640917719, + "grad_norm": 16.482786994138173, + "learning_rate": 1.8116583649263992e-06, + "loss": 0.5735, + "step": 10915 + }, + { + "epoch": 0.45044152843112983, + "grad_norm": 7.787693645815414, + 
"learning_rate": 1.8114622610797794e-06, + "loss": 0.4886, + "step": 10916 + }, + { + "epoch": 0.45048279277048775, + "grad_norm": 31.35132515483693, + "learning_rate": 1.8112661516696645e-06, + "loss": 0.5518, + "step": 10917 + }, + { + "epoch": 0.45052405710984567, + "grad_norm": 4.065317175593473, + "learning_rate": 1.8110700366995574e-06, + "loss": 0.5164, + "step": 10918 + }, + { + "epoch": 0.4505653214492036, + "grad_norm": 3.6389187029771746, + "learning_rate": 1.810873916172961e-06, + "loss": 0.5464, + "step": 10919 + }, + { + "epoch": 0.4506065857885615, + "grad_norm": 5.631853746647101, + "learning_rate": 1.8106777900933792e-06, + "loss": 0.5988, + "step": 10920 + }, + { + "epoch": 0.45064785012791947, + "grad_norm": 3.224799040410553, + "learning_rate": 1.8104816584643145e-06, + "loss": 0.5052, + "step": 10921 + }, + { + "epoch": 0.4506891144672774, + "grad_norm": 3.39231060288522, + "learning_rate": 1.8102855212892708e-06, + "loss": 0.5067, + "step": 10922 + }, + { + "epoch": 0.4507303788066353, + "grad_norm": 2.8184399584468793, + "learning_rate": 1.8100893785717513e-06, + "loss": 0.5638, + "step": 10923 + }, + { + "epoch": 0.4507716431459932, + "grad_norm": 2.8008791827307262, + "learning_rate": 1.8098932303152602e-06, + "loss": 0.5394, + "step": 10924 + }, + { + "epoch": 0.45081290748535113, + "grad_norm": 4.138107812054958, + "learning_rate": 1.8096970765233005e-06, + "loss": 0.5392, + "step": 10925 + }, + { + "epoch": 0.4508541718247091, + "grad_norm": 2.6603565662101567, + "learning_rate": 1.8095009171993764e-06, + "loss": 0.4765, + "step": 10926 + }, + { + "epoch": 0.450895436164067, + "grad_norm": 4.428502275693595, + "learning_rate": 1.8093047523469916e-06, + "loss": 0.5241, + "step": 10927 + }, + { + "epoch": 0.45093670050342494, + "grad_norm": 3.670505908668173, + "learning_rate": 1.8091085819696501e-06, + "loss": 0.4941, + "step": 10928 + }, + { + "epoch": 0.45097796484278285, + "grad_norm": 2.5023895912736327, + "learning_rate": 1.8089124060708559e-06, + "loss": 0.496, + "step": 10929 + }, + { + "epoch": 0.45101922918214077, + "grad_norm": 5.3686401342642895, + "learning_rate": 1.808716224654114e-06, + "loss": 0.5462, + "step": 10930 + }, + { + "epoch": 0.45106049352149874, + "grad_norm": 7.203664412398906, + "learning_rate": 1.808520037722927e-06, + "loss": 0.5328, + "step": 10931 + }, + { + "epoch": 0.45110175786085666, + "grad_norm": 13.406699741521662, + "learning_rate": 1.8083238452808013e-06, + "loss": 0.5278, + "step": 10932 + }, + { + "epoch": 0.45114302220021457, + "grad_norm": 30.76850405343324, + "learning_rate": 1.8081276473312402e-06, + "loss": 0.5297, + "step": 10933 + }, + { + "epoch": 0.4511842865395725, + "grad_norm": 3.777632287523577, + "learning_rate": 1.8079314438777484e-06, + "loss": 0.5335, + "step": 10934 + }, + { + "epoch": 0.4512255508789304, + "grad_norm": 2.9409351282121947, + "learning_rate": 1.80773523492383e-06, + "loss": 0.5098, + "step": 10935 + }, + { + "epoch": 0.4512668152182884, + "grad_norm": 3.489826376105397, + "learning_rate": 1.8075390204729916e-06, + "loss": 0.4449, + "step": 10936 + }, + { + "epoch": 0.4513080795576463, + "grad_norm": 3.650406881644109, + "learning_rate": 1.807342800528736e-06, + "loss": 0.5523, + "step": 10937 + }, + { + "epoch": 0.4513493438970042, + "grad_norm": 7.089108641389478, + "learning_rate": 1.8071465750945696e-06, + "loss": 0.5431, + "step": 10938 + }, + { + "epoch": 0.4513906082363621, + "grad_norm": 3.5251932665460113, + "learning_rate": 1.8069503441739968e-06, + "loss": 0.4434, + "step": 
10939 + }, + { + "epoch": 0.45143187257572004, + "grad_norm": 3.0943954445009907, + "learning_rate": 1.8067541077705237e-06, + "loss": 0.5449, + "step": 10940 + }, + { + "epoch": 0.451473136915078, + "grad_norm": 5.337748567279842, + "learning_rate": 1.8065578658876542e-06, + "loss": 0.5197, + "step": 10941 + }, + { + "epoch": 0.4515144012544359, + "grad_norm": 4.298456199753365, + "learning_rate": 1.8063616185288942e-06, + "loss": 0.5215, + "step": 10942 + }, + { + "epoch": 0.45155566559379384, + "grad_norm": 2.7632512201970685, + "learning_rate": 1.8061653656977495e-06, + "loss": 0.5254, + "step": 10943 + }, + { + "epoch": 0.45159692993315176, + "grad_norm": 3.5430976990325114, + "learning_rate": 1.8059691073977256e-06, + "loss": 0.5579, + "step": 10944 + }, + { + "epoch": 0.4516381942725097, + "grad_norm": 2.407785210035947, + "learning_rate": 1.8057728436323277e-06, + "loss": 0.5019, + "step": 10945 + }, + { + "epoch": 0.45167945861186765, + "grad_norm": 5.074956380986894, + "learning_rate": 1.8055765744050622e-06, + "loss": 0.512, + "step": 10946 + }, + { + "epoch": 0.45172072295122556, + "grad_norm": 2.727655907149956, + "learning_rate": 1.8053802997194345e-06, + "loss": 0.5549, + "step": 10947 + }, + { + "epoch": 0.4517619872905835, + "grad_norm": 3.641594166314303, + "learning_rate": 1.8051840195789509e-06, + "loss": 0.5126, + "step": 10948 + }, + { + "epoch": 0.4518032516299414, + "grad_norm": 2.758460676683853, + "learning_rate": 1.8049877339871171e-06, + "loss": 0.518, + "step": 10949 + }, + { + "epoch": 0.4518445159692993, + "grad_norm": 2.8702492080354403, + "learning_rate": 1.8047914429474393e-06, + "loss": 0.5052, + "step": 10950 + }, + { + "epoch": 0.4518857803086573, + "grad_norm": 12.23124990579422, + "learning_rate": 1.8045951464634238e-06, + "loss": 0.5838, + "step": 10951 + }, + { + "epoch": 0.4519270446480152, + "grad_norm": 2.307756527877238, + "learning_rate": 1.8043988445385773e-06, + "loss": 0.5405, + "step": 10952 + }, + { + "epoch": 0.4519683089873731, + "grad_norm": 2.155752722450413, + "learning_rate": 1.8042025371764058e-06, + "loss": 0.516, + "step": 10953 + }, + { + "epoch": 0.45200957332673103, + "grad_norm": 2.7047941487094977, + "learning_rate": 1.804006224380416e-06, + "loss": 0.5617, + "step": 10954 + }, + { + "epoch": 0.45205083766608894, + "grad_norm": 2.620849306824897, + "learning_rate": 1.8038099061541146e-06, + "loss": 0.5074, + "step": 10955 + }, + { + "epoch": 0.4520921020054469, + "grad_norm": 6.4264522824290635, + "learning_rate": 1.8036135825010082e-06, + "loss": 0.5372, + "step": 10956 + }, + { + "epoch": 0.45213336634480483, + "grad_norm": 3.122613898739933, + "learning_rate": 1.8034172534246036e-06, + "loss": 0.4922, + "step": 10957 + }, + { + "epoch": 0.45217463068416275, + "grad_norm": 2.3603365515816344, + "learning_rate": 1.803220918928408e-06, + "loss": 0.5783, + "step": 10958 + }, + { + "epoch": 0.45221589502352066, + "grad_norm": 4.802372665040942, + "learning_rate": 1.8030245790159279e-06, + "loss": 0.5656, + "step": 10959 + }, + { + "epoch": 0.4522571593628786, + "grad_norm": 1.9945422580978918, + "learning_rate": 1.802828233690671e-06, + "loss": 0.5381, + "step": 10960 + }, + { + "epoch": 0.45229842370223655, + "grad_norm": 2.9712643997839057, + "learning_rate": 1.8026318829561445e-06, + "loss": 0.5076, + "step": 10961 + }, + { + "epoch": 0.45233968804159447, + "grad_norm": 3.222875754586013, + "learning_rate": 1.8024355268158552e-06, + "loss": 0.511, + "step": 10962 + }, + { + "epoch": 0.4523809523809524, + "grad_norm": 
3.1043539733202428, + "learning_rate": 1.8022391652733108e-06, + "loss": 0.5068, + "step": 10963 + }, + { + "epoch": 0.4524222167203103, + "grad_norm": 3.176262069559596, + "learning_rate": 1.8020427983320188e-06, + "loss": 0.4711, + "step": 10964 + }, + { + "epoch": 0.4524634810596682, + "grad_norm": 3.331013175986523, + "learning_rate": 1.8018464259954871e-06, + "loss": 0.5607, + "step": 10965 + }, + { + "epoch": 0.4525047453990262, + "grad_norm": 4.279398124102585, + "learning_rate": 1.801650048267223e-06, + "loss": 0.5707, + "step": 10966 + }, + { + "epoch": 0.4525460097383841, + "grad_norm": 3.236723432894301, + "learning_rate": 1.8014536651507348e-06, + "loss": 0.5433, + "step": 10967 + }, + { + "epoch": 0.452587274077742, + "grad_norm": 5.347230758871029, + "learning_rate": 1.8012572766495295e-06, + "loss": 0.5725, + "step": 10968 + }, + { + "epoch": 0.45262853841709993, + "grad_norm": 4.839224081693464, + "learning_rate": 1.8010608827671156e-06, + "loss": 0.4927, + "step": 10969 + }, + { + "epoch": 0.45266980275645785, + "grad_norm": 4.4712370917987005, + "learning_rate": 1.8008644835070013e-06, + "loss": 0.5098, + "step": 10970 + }, + { + "epoch": 0.4527110670958158, + "grad_norm": 4.365733905184011, + "learning_rate": 1.8006680788726949e-06, + "loss": 0.5462, + "step": 10971 + }, + { + "epoch": 0.45275233143517374, + "grad_norm": 4.092571019264973, + "learning_rate": 1.8004716688677038e-06, + "loss": 0.4859, + "step": 10972 + }, + { + "epoch": 0.45279359577453165, + "grad_norm": 2.801304326420354, + "learning_rate": 1.800275253495538e-06, + "loss": 0.5538, + "step": 10973 + }, + { + "epoch": 0.45283486011388957, + "grad_norm": 5.77443568863354, + "learning_rate": 1.8000788327597045e-06, + "loss": 0.5255, + "step": 10974 + }, + { + "epoch": 0.4528761244532475, + "grad_norm": 7.906947920027667, + "learning_rate": 1.7998824066637122e-06, + "loss": 0.5444, + "step": 10975 + }, + { + "epoch": 0.45291738879260546, + "grad_norm": 10.717473542773865, + "learning_rate": 1.7996859752110698e-06, + "loss": 0.5573, + "step": 10976 + }, + { + "epoch": 0.4529586531319634, + "grad_norm": 4.857137513528468, + "learning_rate": 1.7994895384052865e-06, + "loss": 0.505, + "step": 10977 + }, + { + "epoch": 0.4529999174713213, + "grad_norm": 6.254493074599386, + "learning_rate": 1.7992930962498707e-06, + "loss": 0.5188, + "step": 10978 + }, + { + "epoch": 0.4530411818106792, + "grad_norm": 2.362390146266816, + "learning_rate": 1.799096648748332e-06, + "loss": 0.6045, + "step": 10979 + }, + { + "epoch": 0.4530824461500371, + "grad_norm": 4.681212319161174, + "learning_rate": 1.7989001959041783e-06, + "loss": 0.5736, + "step": 10980 + }, + { + "epoch": 0.45312371048939504, + "grad_norm": 7.285094308301395, + "learning_rate": 1.7987037377209198e-06, + "loss": 0.5623, + "step": 10981 + }, + { + "epoch": 0.453164974828753, + "grad_norm": 2.940006638630888, + "learning_rate": 1.798507274202065e-06, + "loss": 0.5175, + "step": 10982 + }, + { + "epoch": 0.4532062391681109, + "grad_norm": 5.19499320681047, + "learning_rate": 1.7983108053511237e-06, + "loss": 0.4904, + "step": 10983 + }, + { + "epoch": 0.45324750350746884, + "grad_norm": 2.782659854185485, + "learning_rate": 1.7981143311716046e-06, + "loss": 0.5354, + "step": 10984 + }, + { + "epoch": 0.45328876784682676, + "grad_norm": 2.6508135536871804, + "learning_rate": 1.7979178516670186e-06, + "loss": 0.5555, + "step": 10985 + }, + { + "epoch": 0.4533300321861847, + "grad_norm": 3.0546369241378786, + "learning_rate": 1.7977213668408741e-06, + "loss": 
0.5539, + "step": 10986 + }, + { + "epoch": 0.45337129652554264, + "grad_norm": 8.754917202466007, + "learning_rate": 1.7975248766966816e-06, + "loss": 0.5296, + "step": 10987 + }, + { + "epoch": 0.45341256086490056, + "grad_norm": 6.002786220663457, + "learning_rate": 1.7973283812379498e-06, + "loss": 0.5939, + "step": 10988 + }, + { + "epoch": 0.4534538252042585, + "grad_norm": 5.23578844263096, + "learning_rate": 1.79713188046819e-06, + "loss": 0.519, + "step": 10989 + }, + { + "epoch": 0.4534950895436164, + "grad_norm": 2.1863049879604146, + "learning_rate": 1.7969353743909109e-06, + "loss": 0.4834, + "step": 10990 + }, + { + "epoch": 0.4535363538829743, + "grad_norm": 3.648088655392519, + "learning_rate": 1.7967388630096235e-06, + "loss": 0.5577, + "step": 10991 + }, + { + "epoch": 0.4535776182223323, + "grad_norm": 9.128994905471808, + "learning_rate": 1.7965423463278373e-06, + "loss": 0.4938, + "step": 10992 + }, + { + "epoch": 0.4536188825616902, + "grad_norm": 4.089881618740287, + "learning_rate": 1.7963458243490634e-06, + "loss": 0.5388, + "step": 10993 + }, + { + "epoch": 0.4536601469010481, + "grad_norm": 13.223984524125227, + "learning_rate": 1.7961492970768117e-06, + "loss": 0.5092, + "step": 10994 + }, + { + "epoch": 0.453701411240406, + "grad_norm": 3.2087538835368252, + "learning_rate": 1.795952764514593e-06, + "loss": 0.4919, + "step": 10995 + }, + { + "epoch": 0.45374267557976394, + "grad_norm": 3.579650087351336, + "learning_rate": 1.795756226665917e-06, + "loss": 0.5094, + "step": 10996 + }, + { + "epoch": 0.4537839399191219, + "grad_norm": 3.089114593550514, + "learning_rate": 1.7955596835342952e-06, + "loss": 0.5474, + "step": 10997 + }, + { + "epoch": 0.45382520425847983, + "grad_norm": 2.980142169345597, + "learning_rate": 1.7953631351232376e-06, + "loss": 0.4698, + "step": 10998 + }, + { + "epoch": 0.45386646859783775, + "grad_norm": 4.650036836902145, + "learning_rate": 1.7951665814362563e-06, + "loss": 0.5358, + "step": 10999 + }, + { + "epoch": 0.45390773293719566, + "grad_norm": 1.9749406402503538, + "learning_rate": 1.794970022476861e-06, + "loss": 0.5611, + "step": 11000 + }, + { + "epoch": 0.4539489972765536, + "grad_norm": 2.5777424719090605, + "learning_rate": 1.7947734582485631e-06, + "loss": 0.5336, + "step": 11001 + }, + { + "epoch": 0.45399026161591155, + "grad_norm": 9.787481330174371, + "learning_rate": 1.7945768887548742e-06, + "loss": 0.5174, + "step": 11002 + }, + { + "epoch": 0.45403152595526947, + "grad_norm": 2.2159068631247174, + "learning_rate": 1.7943803139993047e-06, + "loss": 0.4851, + "step": 11003 + }, + { + "epoch": 0.4540727902946274, + "grad_norm": 5.038775741577423, + "learning_rate": 1.7941837339853666e-06, + "loss": 0.5914, + "step": 11004 + }, + { + "epoch": 0.4541140546339853, + "grad_norm": 7.783240913395902, + "learning_rate": 1.7939871487165709e-06, + "loss": 0.4978, + "step": 11005 + }, + { + "epoch": 0.4541553189733432, + "grad_norm": 6.4000463935701175, + "learning_rate": 1.79379055819643e-06, + "loss": 0.5774, + "step": 11006 + }, + { + "epoch": 0.4541965833127012, + "grad_norm": 4.9974702013970145, + "learning_rate": 1.7935939624284542e-06, + "loss": 0.5992, + "step": 11007 + }, + { + "epoch": 0.4542378476520591, + "grad_norm": 1.8092170000740802, + "learning_rate": 1.7933973614161559e-06, + "loss": 0.4754, + "step": 11008 + }, + { + "epoch": 0.454279111991417, + "grad_norm": 3.4103045391783535, + "learning_rate": 1.7932007551630466e-06, + "loss": 0.5342, + "step": 11009 + }, + { + "epoch": 0.45432037633077493, + 
"grad_norm": 2.0785049718862574, + "learning_rate": 1.7930041436726385e-06, + "loss": 0.5273, + "step": 11010 + }, + { + "epoch": 0.45436164067013285, + "grad_norm": 2.7820750828496736, + "learning_rate": 1.792807526948443e-06, + "loss": 0.5015, + "step": 11011 + }, + { + "epoch": 0.4544029050094908, + "grad_norm": 9.743480685228128, + "learning_rate": 1.7926109049939732e-06, + "loss": 0.539, + "step": 11012 + }, + { + "epoch": 0.45444416934884874, + "grad_norm": 9.266783814314879, + "learning_rate": 1.7924142778127402e-06, + "loss": 0.5635, + "step": 11013 + }, + { + "epoch": 0.45448543368820665, + "grad_norm": 2.902618466471508, + "learning_rate": 1.792217645408257e-06, + "loss": 0.5277, + "step": 11014 + }, + { + "epoch": 0.45452669802756457, + "grad_norm": 12.772253278489982, + "learning_rate": 1.7920210077840351e-06, + "loss": 0.5621, + "step": 11015 + }, + { + "epoch": 0.4545679623669225, + "grad_norm": 3.538057276430423, + "learning_rate": 1.7918243649435882e-06, + "loss": 0.4709, + "step": 11016 + }, + { + "epoch": 0.45460922670628046, + "grad_norm": 9.254653898667115, + "learning_rate": 1.7916277168904274e-06, + "loss": 0.5303, + "step": 11017 + }, + { + "epoch": 0.45465049104563837, + "grad_norm": 4.633389521533006, + "learning_rate": 1.7914310636280667e-06, + "loss": 0.5371, + "step": 11018 + }, + { + "epoch": 0.4546917553849963, + "grad_norm": 7.724163676596924, + "learning_rate": 1.791234405160018e-06, + "loss": 0.5073, + "step": 11019 + }, + { + "epoch": 0.4547330197243542, + "grad_norm": 2.053027644863, + "learning_rate": 1.7910377414897941e-06, + "loss": 0.5378, + "step": 11020 + }, + { + "epoch": 0.4547742840637121, + "grad_norm": 4.842216320627658, + "learning_rate": 1.790841072620908e-06, + "loss": 0.5173, + "step": 11021 + }, + { + "epoch": 0.4548155484030701, + "grad_norm": 5.395130863957861, + "learning_rate": 1.790644398556873e-06, + "loss": 0.4816, + "step": 11022 + }, + { + "epoch": 0.454856812742428, + "grad_norm": 5.153904574755966, + "learning_rate": 1.7904477193012016e-06, + "loss": 0.5827, + "step": 11023 + }, + { + "epoch": 0.4548980770817859, + "grad_norm": 3.8171005248272794, + "learning_rate": 1.7902510348574075e-06, + "loss": 0.5419, + "step": 11024 + }, + { + "epoch": 0.45493934142114384, + "grad_norm": 4.763952660226514, + "learning_rate": 1.7900543452290037e-06, + "loss": 0.5498, + "step": 11025 + }, + { + "epoch": 0.45498060576050176, + "grad_norm": 2.4599112117179835, + "learning_rate": 1.7898576504195044e-06, + "loss": 0.513, + "step": 11026 + }, + { + "epoch": 0.4550218700998597, + "grad_norm": 4.535025635209339, + "learning_rate": 1.7896609504324215e-06, + "loss": 0.5344, + "step": 11027 + }, + { + "epoch": 0.45506313443921764, + "grad_norm": 2.6442928081762864, + "learning_rate": 1.7894642452712701e-06, + "loss": 0.5101, + "step": 11028 + }, + { + "epoch": 0.45510439877857556, + "grad_norm": 5.807093458526588, + "learning_rate": 1.7892675349395625e-06, + "loss": 0.5356, + "step": 11029 + }, + { + "epoch": 0.4551456631179335, + "grad_norm": 1.9628593168756936, + "learning_rate": 1.7890708194408132e-06, + "loss": 0.4574, + "step": 11030 + }, + { + "epoch": 0.4551869274572914, + "grad_norm": 2.4892888040992838, + "learning_rate": 1.788874098778536e-06, + "loss": 0.5378, + "step": 11031 + }, + { + "epoch": 0.45522819179664936, + "grad_norm": 2.96951454121454, + "learning_rate": 1.788677372956245e-06, + "loss": 0.5249, + "step": 11032 + }, + { + "epoch": 0.4552694561360073, + "grad_norm": 2.686019838974436, + "learning_rate": 1.7884806419774539e-06, + 
"loss": 0.5018, + "step": 11033 + }, + { + "epoch": 0.4553107204753652, + "grad_norm": 2.6577266107600406, + "learning_rate": 1.7882839058456771e-06, + "loss": 0.5383, + "step": 11034 + }, + { + "epoch": 0.4553519848147231, + "grad_norm": 2.8037558342956284, + "learning_rate": 1.788087164564428e-06, + "loss": 0.5661, + "step": 11035 + }, + { + "epoch": 0.455393249154081, + "grad_norm": 3.2325659038982124, + "learning_rate": 1.7878904181372217e-06, + "loss": 0.4958, + "step": 11036 + }, + { + "epoch": 0.455434513493439, + "grad_norm": 4.574465652570409, + "learning_rate": 1.7876936665675717e-06, + "loss": 0.5065, + "step": 11037 + }, + { + "epoch": 0.4554757778327969, + "grad_norm": 2.9021437708827627, + "learning_rate": 1.7874969098589943e-06, + "loss": 0.5644, + "step": 11038 + }, + { + "epoch": 0.45551704217215483, + "grad_norm": 5.390637000287868, + "learning_rate": 1.7873001480150024e-06, + "loss": 0.5544, + "step": 11039 + }, + { + "epoch": 0.45555830651151275, + "grad_norm": 3.431954099524784, + "learning_rate": 1.7871033810391108e-06, + "loss": 0.5125, + "step": 11040 + }, + { + "epoch": 0.45559957085087066, + "grad_norm": 2.906930126536083, + "learning_rate": 1.7869066089348347e-06, + "loss": 0.5202, + "step": 11041 + }, + { + "epoch": 0.4556408351902286, + "grad_norm": 3.67380020896469, + "learning_rate": 1.786709831705689e-06, + "loss": 0.5005, + "step": 11042 + }, + { + "epoch": 0.45568209952958655, + "grad_norm": 4.092873797654644, + "learning_rate": 1.7865130493551886e-06, + "loss": 0.5155, + "step": 11043 + }, + { + "epoch": 0.45572336386894446, + "grad_norm": 2.9001412855903244, + "learning_rate": 1.786316261886848e-06, + "loss": 0.5393, + "step": 11044 + }, + { + "epoch": 0.4557646282083024, + "grad_norm": 3.03218423182416, + "learning_rate": 1.7861194693041834e-06, + "loss": 0.5664, + "step": 11045 + }, + { + "epoch": 0.4558058925476603, + "grad_norm": 3.1944023482429627, + "learning_rate": 1.7859226716107087e-06, + "loss": 0.4833, + "step": 11046 + }, + { + "epoch": 0.4558471568870182, + "grad_norm": 2.9996601893477703, + "learning_rate": 1.78572586880994e-06, + "loss": 0.5106, + "step": 11047 + }, + { + "epoch": 0.4558884212263762, + "grad_norm": 2.7203042531570136, + "learning_rate": 1.7855290609053923e-06, + "loss": 0.5241, + "step": 11048 + }, + { + "epoch": 0.4559296855657341, + "grad_norm": 3.4264818059601914, + "learning_rate": 1.7853322479005818e-06, + "loss": 0.55, + "step": 11049 + }, + { + "epoch": 0.455970949905092, + "grad_norm": 3.340227448461778, + "learning_rate": 1.7851354297990229e-06, + "loss": 0.5271, + "step": 11050 + }, + { + "epoch": 0.45601221424444993, + "grad_norm": 8.508590791098047, + "learning_rate": 1.7849386066042326e-06, + "loss": 0.5539, + "step": 11051 + }, + { + "epoch": 0.45605347858380785, + "grad_norm": 4.888976295194902, + "learning_rate": 1.7847417783197255e-06, + "loss": 0.5038, + "step": 11052 + }, + { + "epoch": 0.4560947429231658, + "grad_norm": 3.5631460334278926, + "learning_rate": 1.7845449449490183e-06, + "loss": 0.5233, + "step": 11053 + }, + { + "epoch": 0.45613600726252373, + "grad_norm": 4.679999514724939, + "learning_rate": 1.7843481064956261e-06, + "loss": 0.5578, + "step": 11054 + }, + { + "epoch": 0.45617727160188165, + "grad_norm": 3.053776779966635, + "learning_rate": 1.784151262963066e-06, + "loss": 0.5326, + "step": 11055 + }, + { + "epoch": 0.45621853594123957, + "grad_norm": 23.6865027958466, + "learning_rate": 1.7839544143548528e-06, + "loss": 0.5049, + "step": 11056 + }, + { + "epoch": 0.4562598002805975, + 
"grad_norm": 4.429304847637976, + "learning_rate": 1.7837575606745035e-06, + "loss": 0.502, + "step": 11057 + }, + { + "epoch": 0.45630106461995545, + "grad_norm": 2.940117102035905, + "learning_rate": 1.783560701925535e-06, + "loss": 0.475, + "step": 11058 + }, + { + "epoch": 0.45634232895931337, + "grad_norm": 3.9525362862827205, + "learning_rate": 1.7833638381114626e-06, + "loss": 0.5506, + "step": 11059 + }, + { + "epoch": 0.4563835932986713, + "grad_norm": 2.4870306077151616, + "learning_rate": 1.7831669692358026e-06, + "loss": 0.4692, + "step": 11060 + }, + { + "epoch": 0.4564248576380292, + "grad_norm": 5.076944442754225, + "learning_rate": 1.7829700953020729e-06, + "loss": 0.5622, + "step": 11061 + }, + { + "epoch": 0.4564661219773871, + "grad_norm": 4.494773010976541, + "learning_rate": 1.7827732163137892e-06, + "loss": 0.5161, + "step": 11062 + }, + { + "epoch": 0.4565073863167451, + "grad_norm": 2.226902839402045, + "learning_rate": 1.7825763322744685e-06, + "loss": 0.469, + "step": 11063 + }, + { + "epoch": 0.456548650656103, + "grad_norm": 3.400232693685327, + "learning_rate": 1.7823794431876273e-06, + "loss": 0.5496, + "step": 11064 + }, + { + "epoch": 0.4565899149954609, + "grad_norm": 3.8428316531151268, + "learning_rate": 1.7821825490567838e-06, + "loss": 0.5252, + "step": 11065 + }, + { + "epoch": 0.45663117933481884, + "grad_norm": 3.0237598300686384, + "learning_rate": 1.781985649885453e-06, + "loss": 0.5592, + "step": 11066 + }, + { + "epoch": 0.45667244367417675, + "grad_norm": 6.6682951278929155, + "learning_rate": 1.7817887456771537e-06, + "loss": 0.5313, + "step": 11067 + }, + { + "epoch": 0.4567137080135347, + "grad_norm": 3.0189063494663078, + "learning_rate": 1.781591836435402e-06, + "loss": 0.5741, + "step": 11068 + }, + { + "epoch": 0.45675497235289264, + "grad_norm": 2.873769825079489, + "learning_rate": 1.781394922163716e-06, + "loss": 0.5383, + "step": 11069 + }, + { + "epoch": 0.45679623669225056, + "grad_norm": 4.794574102613638, + "learning_rate": 1.7811980028656126e-06, + "loss": 0.5559, + "step": 11070 + }, + { + "epoch": 0.4568375010316085, + "grad_norm": 3.447325857599845, + "learning_rate": 1.7810010785446094e-06, + "loss": 0.5458, + "step": 11071 + }, + { + "epoch": 0.4568787653709664, + "grad_norm": 5.3290620018763475, + "learning_rate": 1.780804149204224e-06, + "loss": 0.5415, + "step": 11072 + }, + { + "epoch": 0.45692002971032436, + "grad_norm": 2.276180746521897, + "learning_rate": 1.7806072148479743e-06, + "loss": 0.5357, + "step": 11073 + }, + { + "epoch": 0.4569612940496823, + "grad_norm": 9.795888184001134, + "learning_rate": 1.7804102754793775e-06, + "loss": 0.4933, + "step": 11074 + }, + { + "epoch": 0.4570025583890402, + "grad_norm": 1.9114021877984364, + "learning_rate": 1.7802133311019517e-06, + "loss": 0.4785, + "step": 11075 + }, + { + "epoch": 0.4570438227283981, + "grad_norm": 3.4448878853521965, + "learning_rate": 1.7800163817192148e-06, + "loss": 0.5254, + "step": 11076 + }, + { + "epoch": 0.457085087067756, + "grad_norm": 2.1555166890460264, + "learning_rate": 1.779819427334685e-06, + "loss": 0.5431, + "step": 11077 + }, + { + "epoch": 0.457126351407114, + "grad_norm": 6.362738443864129, + "learning_rate": 1.7796224679518801e-06, + "loss": 0.5592, + "step": 11078 + }, + { + "epoch": 0.4571676157464719, + "grad_norm": 4.969840910700131, + "learning_rate": 1.7794255035743183e-06, + "loss": 0.5126, + "step": 11079 + }, + { + "epoch": 0.4572088800858298, + "grad_norm": 59.31639043870088, + "learning_rate": 1.7792285342055184e-06, + 
"loss": 0.4942, + "step": 11080 + }, + { + "epoch": 0.45725014442518774, + "grad_norm": 3.2541344235382645, + "learning_rate": 1.7790315598489977e-06, + "loss": 0.5332, + "step": 11081 + }, + { + "epoch": 0.45729140876454566, + "grad_norm": 6.634752156469051, + "learning_rate": 1.7788345805082758e-06, + "loss": 0.4707, + "step": 11082 + }, + { + "epoch": 0.45733267310390363, + "grad_norm": 3.504426894439376, + "learning_rate": 1.7786375961868703e-06, + "loss": 0.4719, + "step": 11083 + }, + { + "epoch": 0.45737393744326155, + "grad_norm": 2.8013538581554513, + "learning_rate": 1.778440606888301e-06, + "loss": 0.5487, + "step": 11084 + }, + { + "epoch": 0.45741520178261946, + "grad_norm": 2.6574889495508143, + "learning_rate": 1.7782436126160856e-06, + "loss": 0.5342, + "step": 11085 + }, + { + "epoch": 0.4574564661219774, + "grad_norm": 9.02448660514147, + "learning_rate": 1.7780466133737433e-06, + "loss": 0.5234, + "step": 11086 + }, + { + "epoch": 0.4574977304613353, + "grad_norm": 5.401594228042585, + "learning_rate": 1.7778496091647926e-06, + "loss": 0.5438, + "step": 11087 + }, + { + "epoch": 0.45753899480069327, + "grad_norm": 3.188387905530609, + "learning_rate": 1.7776525999927532e-06, + "loss": 0.5031, + "step": 11088 + }, + { + "epoch": 0.4575802591400512, + "grad_norm": 2.0320309872067583, + "learning_rate": 1.777455585861143e-06, + "loss": 0.4985, + "step": 11089 + }, + { + "epoch": 0.4576215234794091, + "grad_norm": 2.8568295531698737, + "learning_rate": 1.7772585667734827e-06, + "loss": 0.5177, + "step": 11090 + }, + { + "epoch": 0.457662787818767, + "grad_norm": 5.504982720181353, + "learning_rate": 1.777061542733291e-06, + "loss": 0.5638, + "step": 11091 + }, + { + "epoch": 0.45770405215812493, + "grad_norm": 5.099059446164261, + "learning_rate": 1.7768645137440867e-06, + "loss": 0.5242, + "step": 11092 + }, + { + "epoch": 0.4577453164974829, + "grad_norm": 11.014228380438736, + "learning_rate": 1.7766674798093895e-06, + "loss": 0.5017, + "step": 11093 + }, + { + "epoch": 0.4577865808368408, + "grad_norm": 8.389023024533662, + "learning_rate": 1.776470440932719e-06, + "loss": 0.4974, + "step": 11094 + }, + { + "epoch": 0.45782784517619873, + "grad_norm": 14.459752177286207, + "learning_rate": 1.7762733971175946e-06, + "loss": 0.4741, + "step": 11095 + }, + { + "epoch": 0.45786910951555665, + "grad_norm": 3.247314957291661, + "learning_rate": 1.7760763483675367e-06, + "loss": 0.5545, + "step": 11096 + }, + { + "epoch": 0.45791037385491457, + "grad_norm": 2.9031766215911907, + "learning_rate": 1.775879294686064e-06, + "loss": 0.4937, + "step": 11097 + }, + { + "epoch": 0.45795163819427254, + "grad_norm": 3.1801261993242083, + "learning_rate": 1.7756822360766977e-06, + "loss": 0.5117, + "step": 11098 + }, + { + "epoch": 0.45799290253363045, + "grad_norm": 2.1867058867076867, + "learning_rate": 1.7754851725429566e-06, + "loss": 0.5655, + "step": 11099 + }, + { + "epoch": 0.45803416687298837, + "grad_norm": 9.590625773046499, + "learning_rate": 1.7752881040883612e-06, + "loss": 0.5218, + "step": 11100 + }, + { + "epoch": 0.4580754312123463, + "grad_norm": 14.530884974070322, + "learning_rate": 1.7750910307164313e-06, + "loss": 0.5071, + "step": 11101 + }, + { + "epoch": 0.4581166955517042, + "grad_norm": 2.9912271372239116, + "learning_rate": 1.7748939524306877e-06, + "loss": 0.5456, + "step": 11102 + }, + { + "epoch": 0.4581579598910621, + "grad_norm": 6.7722704315731015, + "learning_rate": 1.77469686923465e-06, + "loss": 0.5704, + "step": 11103 + }, + { + "epoch": 
0.4581992242304201, + "grad_norm": 9.942730202218934, + "learning_rate": 1.7744997811318401e-06, + "loss": 0.5485, + "step": 11104 + }, + { + "epoch": 0.458240488569778, + "grad_norm": 2.727623700161297, + "learning_rate": 1.7743026881257765e-06, + "loss": 0.5474, + "step": 11105 + }, + { + "epoch": 0.4582817529091359, + "grad_norm": 2.8977791336912047, + "learning_rate": 1.774105590219981e-06, + "loss": 0.5272, + "step": 11106 + }, + { + "epoch": 0.45832301724849384, + "grad_norm": 4.03074610893366, + "learning_rate": 1.7739084874179735e-06, + "loss": 0.481, + "step": 11107 + }, + { + "epoch": 0.45836428158785175, + "grad_norm": 3.1648471799925946, + "learning_rate": 1.7737113797232759e-06, + "loss": 0.4582, + "step": 11108 + }, + { + "epoch": 0.4584055459272097, + "grad_norm": 2.7575821623605172, + "learning_rate": 1.7735142671394079e-06, + "loss": 0.6094, + "step": 11109 + }, + { + "epoch": 0.45844681026656764, + "grad_norm": 2.23754726156138, + "learning_rate": 1.7733171496698911e-06, + "loss": 0.4698, + "step": 11110 + }, + { + "epoch": 0.45848807460592556, + "grad_norm": 3.1707697520516387, + "learning_rate": 1.773120027318246e-06, + "loss": 0.5104, + "step": 11111 + }, + { + "epoch": 0.45852933894528347, + "grad_norm": 2.9776127258599296, + "learning_rate": 1.7729229000879945e-06, + "loss": 0.4909, + "step": 11112 + }, + { + "epoch": 0.4585706032846414, + "grad_norm": 2.5970474272101876, + "learning_rate": 1.7727257679826566e-06, + "loss": 0.5637, + "step": 11113 + }, + { + "epoch": 0.45861186762399936, + "grad_norm": 5.249123480001796, + "learning_rate": 1.7725286310057545e-06, + "loss": 0.5325, + "step": 11114 + }, + { + "epoch": 0.4586531319633573, + "grad_norm": 8.03495848101039, + "learning_rate": 1.7723314891608091e-06, + "loss": 0.5195, + "step": 11115 + }, + { + "epoch": 0.4586943963027152, + "grad_norm": 3.376991319335498, + "learning_rate": 1.7721343424513424e-06, + "loss": 0.5198, + "step": 11116 + }, + { + "epoch": 0.4587356606420731, + "grad_norm": 15.806072424996064, + "learning_rate": 1.7719371908808755e-06, + "loss": 0.5667, + "step": 11117 + }, + { + "epoch": 0.458776924981431, + "grad_norm": 3.9874295211201995, + "learning_rate": 1.7717400344529298e-06, + "loss": 0.5491, + "step": 11118 + }, + { + "epoch": 0.458818189320789, + "grad_norm": 2.5473169684387194, + "learning_rate": 1.7715428731710275e-06, + "loss": 0.5509, + "step": 11119 + }, + { + "epoch": 0.4588594536601469, + "grad_norm": 4.844138616708796, + "learning_rate": 1.7713457070386901e-06, + "loss": 0.4734, + "step": 11120 + }, + { + "epoch": 0.4589007179995048, + "grad_norm": 5.104059451070542, + "learning_rate": 1.7711485360594396e-06, + "loss": 0.5499, + "step": 11121 + }, + { + "epoch": 0.45894198233886274, + "grad_norm": 2.7274066304608735, + "learning_rate": 1.770951360236798e-06, + "loss": 0.5427, + "step": 11122 + }, + { + "epoch": 0.45898324667822066, + "grad_norm": 5.3089010392489975, + "learning_rate": 1.7707541795742872e-06, + "loss": 0.5476, + "step": 11123 + }, + { + "epoch": 0.45902451101757863, + "grad_norm": 4.087912731947011, + "learning_rate": 1.7705569940754297e-06, + "loss": 0.5513, + "step": 11124 + }, + { + "epoch": 0.45906577535693655, + "grad_norm": 3.988100562463309, + "learning_rate": 1.7703598037437474e-06, + "loss": 0.4871, + "step": 11125 + }, + { + "epoch": 0.45910703969629446, + "grad_norm": 3.2796919290767215, + "learning_rate": 1.7701626085827624e-06, + "loss": 0.545, + "step": 11126 + }, + { + "epoch": 0.4591483040356524, + "grad_norm": 2.676211214448488, + 
"learning_rate": 1.7699654085959975e-06, + "loss": 0.5546, + "step": 11127 + }, + { + "epoch": 0.4591895683750103, + "grad_norm": 5.569352070839614, + "learning_rate": 1.7697682037869749e-06, + "loss": 0.5296, + "step": 11128 + }, + { + "epoch": 0.45923083271436826, + "grad_norm": 2.819952735265273, + "learning_rate": 1.7695709941592174e-06, + "loss": 0.4486, + "step": 11129 + }, + { + "epoch": 0.4592720970537262, + "grad_norm": 3.3221286971186097, + "learning_rate": 1.7693737797162475e-06, + "loss": 0.5367, + "step": 11130 + }, + { + "epoch": 0.4593133613930841, + "grad_norm": 2.4688284502665137, + "learning_rate": 1.7691765604615887e-06, + "loss": 0.5094, + "step": 11131 + }, + { + "epoch": 0.459354625732442, + "grad_norm": 3.18290749434672, + "learning_rate": 1.7689793363987623e-06, + "loss": 0.6122, + "step": 11132 + }, + { + "epoch": 0.45939589007179993, + "grad_norm": 9.929838312968808, + "learning_rate": 1.7687821075312924e-06, + "loss": 0.5657, + "step": 11133 + }, + { + "epoch": 0.4594371544111579, + "grad_norm": 17.46827304180523, + "learning_rate": 1.7685848738627018e-06, + "loss": 0.5395, + "step": 11134 + }, + { + "epoch": 0.4594784187505158, + "grad_norm": 3.9942429708700025, + "learning_rate": 1.7683876353965133e-06, + "loss": 0.4758, + "step": 11135 + }, + { + "epoch": 0.45951968308987373, + "grad_norm": 2.5078612340049515, + "learning_rate": 1.7681903921362499e-06, + "loss": 0.5195, + "step": 11136 + }, + { + "epoch": 0.45956094742923165, + "grad_norm": 3.849774910537394, + "learning_rate": 1.7679931440854362e-06, + "loss": 0.5749, + "step": 11137 + }, + { + "epoch": 0.45960221176858956, + "grad_norm": 3.8343957377746984, + "learning_rate": 1.7677958912475935e-06, + "loss": 0.5399, + "step": 11138 + }, + { + "epoch": 0.45964347610794754, + "grad_norm": 2.8792142363747857, + "learning_rate": 1.7675986336262467e-06, + "loss": 0.5474, + "step": 11139 + }, + { + "epoch": 0.45968474044730545, + "grad_norm": 2.0510972389013404, + "learning_rate": 1.7674013712249188e-06, + "loss": 0.5185, + "step": 11140 + }, + { + "epoch": 0.45972600478666337, + "grad_norm": 1.990077340485488, + "learning_rate": 1.7672041040471334e-06, + "loss": 0.483, + "step": 11141 + }, + { + "epoch": 0.4597672691260213, + "grad_norm": 3.7942946587097817, + "learning_rate": 1.7670068320964141e-06, + "loss": 0.498, + "step": 11142 + }, + { + "epoch": 0.4598085334653792, + "grad_norm": 4.709326944753181, + "learning_rate": 1.7668095553762853e-06, + "loss": 0.5297, + "step": 11143 + }, + { + "epoch": 0.45984979780473717, + "grad_norm": 2.9299246553738674, + "learning_rate": 1.76661227389027e-06, + "loss": 0.5024, + "step": 11144 + }, + { + "epoch": 0.4598910621440951, + "grad_norm": 2.6418769074688155, + "learning_rate": 1.766414987641893e-06, + "loss": 0.4643, + "step": 11145 + }, + { + "epoch": 0.459932326483453, + "grad_norm": 5.5689074958594595, + "learning_rate": 1.7662176966346773e-06, + "loss": 0.5288, + "step": 11146 + }, + { + "epoch": 0.4599735908228109, + "grad_norm": 9.495629994241535, + "learning_rate": 1.766020400872148e-06, + "loss": 0.5344, + "step": 11147 + }, + { + "epoch": 0.46001485516216883, + "grad_norm": 2.1332948305457595, + "learning_rate": 1.7658231003578283e-06, + "loss": 0.5241, + "step": 11148 + }, + { + "epoch": 0.4600561195015268, + "grad_norm": 3.374201948523966, + "learning_rate": 1.7656257950952435e-06, + "loss": 0.5286, + "step": 11149 + }, + { + "epoch": 0.4600973838408847, + "grad_norm": 7.006648540684682, + "learning_rate": 1.7654284850879175e-06, + "loss": 0.6152, + "step": 
11150 + }, + { + "epoch": 0.46013864818024264, + "grad_norm": 3.53052871751234, + "learning_rate": 1.7652311703393748e-06, + "loss": 0.5256, + "step": 11151 + }, + { + "epoch": 0.46017991251960055, + "grad_norm": 2.6798389582092397, + "learning_rate": 1.7650338508531394e-06, + "loss": 0.5337, + "step": 11152 + }, + { + "epoch": 0.46022117685895847, + "grad_norm": 4.936696593610338, + "learning_rate": 1.764836526632737e-06, + "loss": 0.5649, + "step": 11153 + }, + { + "epoch": 0.46026244119831644, + "grad_norm": 9.614795432758857, + "learning_rate": 1.7646391976816913e-06, + "loss": 0.4827, + "step": 11154 + }, + { + "epoch": 0.46030370553767436, + "grad_norm": 2.9476776446599544, + "learning_rate": 1.7644418640035277e-06, + "loss": 0.5847, + "step": 11155 + }, + { + "epoch": 0.4603449698770323, + "grad_norm": 2.349117013955853, + "learning_rate": 1.7642445256017708e-06, + "loss": 0.5366, + "step": 11156 + }, + { + "epoch": 0.4603862342163902, + "grad_norm": 3.7210279494454497, + "learning_rate": 1.7640471824799456e-06, + "loss": 0.5194, + "step": 11157 + }, + { + "epoch": 0.4604274985557481, + "grad_norm": 4.05043624274273, + "learning_rate": 1.7638498346415774e-06, + "loss": 0.5105, + "step": 11158 + }, + { + "epoch": 0.4604687628951061, + "grad_norm": 4.7908000720465935, + "learning_rate": 1.7636524820901907e-06, + "loss": 0.533, + "step": 11159 + }, + { + "epoch": 0.460510027234464, + "grad_norm": 7.132230213604007, + "learning_rate": 1.7634551248293115e-06, + "loss": 0.4915, + "step": 11160 + }, + { + "epoch": 0.4605512915738219, + "grad_norm": 6.8717446857071405, + "learning_rate": 1.7632577628624646e-06, + "loss": 0.4644, + "step": 11161 + }, + { + "epoch": 0.4605925559131798, + "grad_norm": 4.014133987767375, + "learning_rate": 1.7630603961931754e-06, + "loss": 0.5669, + "step": 11162 + }, + { + "epoch": 0.46063382025253774, + "grad_norm": 5.7389409385314085, + "learning_rate": 1.7628630248249697e-06, + "loss": 0.5697, + "step": 11163 + }, + { + "epoch": 0.46067508459189566, + "grad_norm": 3.266327480999329, + "learning_rate": 1.762665648761373e-06, + "loss": 0.4982, + "step": 11164 + }, + { + "epoch": 0.4607163489312536, + "grad_norm": 2.132848346029163, + "learning_rate": 1.7624682680059097e-06, + "loss": 0.544, + "step": 11165 + }, + { + "epoch": 0.46075761327061154, + "grad_norm": 3.3369847220589484, + "learning_rate": 1.7622708825621073e-06, + "loss": 0.5133, + "step": 11166 + }, + { + "epoch": 0.46079887760996946, + "grad_norm": 33.004895420905086, + "learning_rate": 1.7620734924334907e-06, + "loss": 0.459, + "step": 11167 + }, + { + "epoch": 0.4608401419493274, + "grad_norm": 3.0427206869623173, + "learning_rate": 1.7618760976235861e-06, + "loss": 0.5193, + "step": 11168 + }, + { + "epoch": 0.4608814062886853, + "grad_norm": 2.5928955142997068, + "learning_rate": 1.761678698135919e-06, + "loss": 0.4984, + "step": 11169 + }, + { + "epoch": 0.46092267062804326, + "grad_norm": 3.9312953060713873, + "learning_rate": 1.761481293974016e-06, + "loss": 0.5587, + "step": 11170 + }, + { + "epoch": 0.4609639349674012, + "grad_norm": 3.0220450763767737, + "learning_rate": 1.7612838851414027e-06, + "loss": 0.5595, + "step": 11171 + }, + { + "epoch": 0.4610051993067591, + "grad_norm": 5.600461787440552, + "learning_rate": 1.7610864716416058e-06, + "loss": 0.5632, + "step": 11172 + }, + { + "epoch": 0.461046463646117, + "grad_norm": 2.7139567136579537, + "learning_rate": 1.7608890534781509e-06, + "loss": 0.5779, + "step": 11173 + }, + { + "epoch": 0.4610877279854749, + "grad_norm": 
5.017611230661919, + "learning_rate": 1.7606916306545653e-06, + "loss": 0.5204, + "step": 11174 + }, + { + "epoch": 0.4611289923248329, + "grad_norm": 5.053719089575336, + "learning_rate": 1.7604942031743748e-06, + "loss": 0.5879, + "step": 11175 + }, + { + "epoch": 0.4611702566641908, + "grad_norm": 2.4738343846993667, + "learning_rate": 1.7602967710411064e-06, + "loss": 0.5462, + "step": 11176 + }, + { + "epoch": 0.46121152100354873, + "grad_norm": 12.585071492993814, + "learning_rate": 1.7600993342582866e-06, + "loss": 0.5576, + "step": 11177 + }, + { + "epoch": 0.46125278534290665, + "grad_norm": 4.271946113178658, + "learning_rate": 1.759901892829442e-06, + "loss": 0.5041, + "step": 11178 + }, + { + "epoch": 0.46129404968226456, + "grad_norm": 21.33801061659549, + "learning_rate": 1.7597044467580992e-06, + "loss": 0.5697, + "step": 11179 + }, + { + "epoch": 0.46133531402162253, + "grad_norm": 2.6925567875726735, + "learning_rate": 1.7595069960477852e-06, + "loss": 0.5221, + "step": 11180 + }, + { + "epoch": 0.46137657836098045, + "grad_norm": 2.9835399580110495, + "learning_rate": 1.7593095407020272e-06, + "loss": 0.4921, + "step": 11181 + }, + { + "epoch": 0.46141784270033837, + "grad_norm": 2.676718880918298, + "learning_rate": 1.759112080724352e-06, + "loss": 0.5173, + "step": 11182 + }, + { + "epoch": 0.4614591070396963, + "grad_norm": 3.1644267960876187, + "learning_rate": 1.7589146161182872e-06, + "loss": 0.5267, + "step": 11183 + }, + { + "epoch": 0.4615003713790542, + "grad_norm": 2.1856033815212847, + "learning_rate": 1.7587171468873595e-06, + "loss": 0.5343, + "step": 11184 + }, + { + "epoch": 0.46154163571841217, + "grad_norm": 4.300033854534478, + "learning_rate": 1.7585196730350963e-06, + "loss": 0.508, + "step": 11185 + }, + { + "epoch": 0.4615829000577701, + "grad_norm": 4.125156162411396, + "learning_rate": 1.758322194565025e-06, + "loss": 0.5133, + "step": 11186 + }, + { + "epoch": 0.461624164397128, + "grad_norm": 3.8301249490669886, + "learning_rate": 1.7581247114806727e-06, + "loss": 0.5571, + "step": 11187 + }, + { + "epoch": 0.4616654287364859, + "grad_norm": 4.746054669676935, + "learning_rate": 1.7579272237855681e-06, + "loss": 0.5725, + "step": 11188 + }, + { + "epoch": 0.46170669307584383, + "grad_norm": 2.9140609301012756, + "learning_rate": 1.7577297314832373e-06, + "loss": 0.5007, + "step": 11189 + }, + { + "epoch": 0.4617479574152018, + "grad_norm": 2.5767014793299072, + "learning_rate": 1.7575322345772095e-06, + "loss": 0.558, + "step": 11190 + }, + { + "epoch": 0.4617892217545597, + "grad_norm": 2.8930252659889364, + "learning_rate": 1.757334733071011e-06, + "loss": 0.5501, + "step": 11191 + }, + { + "epoch": 0.46183048609391764, + "grad_norm": 3.056325314192585, + "learning_rate": 1.7571372269681712e-06, + "loss": 0.5259, + "step": 11192 + }, + { + "epoch": 0.46187175043327555, + "grad_norm": 3.843510452372127, + "learning_rate": 1.7569397162722164e-06, + "loss": 0.597, + "step": 11193 + }, + { + "epoch": 0.46191301477263347, + "grad_norm": 2.5798253670594495, + "learning_rate": 1.7567422009866763e-06, + "loss": 0.4915, + "step": 11194 + }, + { + "epoch": 0.46195427911199144, + "grad_norm": 2.0101049710638463, + "learning_rate": 1.7565446811150777e-06, + "loss": 0.4905, + "step": 11195 + }, + { + "epoch": 0.46199554345134936, + "grad_norm": 10.40157532374555, + "learning_rate": 1.7563471566609493e-06, + "loss": 0.5642, + "step": 11196 + }, + { + "epoch": 0.46203680779070727, + "grad_norm": 4.872993477825161, + "learning_rate": 1.7561496276278197e-06, + 
"loss": 0.5005, + "step": 11197 + }, + { + "epoch": 0.4620780721300652, + "grad_norm": 2.2502608989923383, + "learning_rate": 1.7559520940192165e-06, + "loss": 0.5438, + "step": 11198 + }, + { + "epoch": 0.4621193364694231, + "grad_norm": 2.514106663565442, + "learning_rate": 1.7557545558386691e-06, + "loss": 0.5281, + "step": 11199 + }, + { + "epoch": 0.4621606008087811, + "grad_norm": 3.5145748590064305, + "learning_rate": 1.7555570130897052e-06, + "loss": 0.5322, + "step": 11200 + }, + { + "epoch": 0.462201865148139, + "grad_norm": 3.0857193033184913, + "learning_rate": 1.755359465775854e-06, + "loss": 0.475, + "step": 11201 + }, + { + "epoch": 0.4622431294874969, + "grad_norm": 3.5075752724334635, + "learning_rate": 1.755161913900644e-06, + "loss": 0.5298, + "step": 11202 + }, + { + "epoch": 0.4622843938268548, + "grad_norm": 5.960820471249095, + "learning_rate": 1.7549643574676038e-06, + "loss": 0.5612, + "step": 11203 + }, + { + "epoch": 0.46232565816621274, + "grad_norm": 2.10937981595588, + "learning_rate": 1.7547667964802623e-06, + "loss": 0.5253, + "step": 11204 + }, + { + "epoch": 0.4623669225055707, + "grad_norm": 2.940178634962035, + "learning_rate": 1.7545692309421485e-06, + "loss": 0.5062, + "step": 11205 + }, + { + "epoch": 0.4624081868449286, + "grad_norm": 17.851097237049675, + "learning_rate": 1.7543716608567912e-06, + "loss": 0.5644, + "step": 11206 + }, + { + "epoch": 0.46244945118428654, + "grad_norm": 3.55944026290329, + "learning_rate": 1.75417408622772e-06, + "loss": 0.5163, + "step": 11207 + }, + { + "epoch": 0.46249071552364446, + "grad_norm": 3.8741903029753497, + "learning_rate": 1.7539765070584631e-06, + "loss": 0.5893, + "step": 11208 + }, + { + "epoch": 0.4625319798630024, + "grad_norm": 2.4888334521387883, + "learning_rate": 1.7537789233525513e-06, + "loss": 0.5335, + "step": 11209 + }, + { + "epoch": 0.46257324420236035, + "grad_norm": 4.828976821808875, + "learning_rate": 1.753581335113513e-06, + "loss": 0.4839, + "step": 11210 + }, + { + "epoch": 0.46261450854171826, + "grad_norm": 3.0447493286118044, + "learning_rate": 1.7533837423448776e-06, + "loss": 0.5925, + "step": 11211 + }, + { + "epoch": 0.4626557728810762, + "grad_norm": 6.219214270614554, + "learning_rate": 1.7531861450501745e-06, + "loss": 0.5685, + "step": 11212 + }, + { + "epoch": 0.4626970372204341, + "grad_norm": 2.8481878186471206, + "learning_rate": 1.7529885432329335e-06, + "loss": 0.5791, + "step": 11213 + }, + { + "epoch": 0.462738301559792, + "grad_norm": 3.7760337863758364, + "learning_rate": 1.752790936896684e-06, + "loss": 0.4903, + "step": 11214 + }, + { + "epoch": 0.46277956589915, + "grad_norm": 5.5389626271463746, + "learning_rate": 1.7525933260449562e-06, + "loss": 0.5511, + "step": 11215 + }, + { + "epoch": 0.4628208302385079, + "grad_norm": 3.852858300670651, + "learning_rate": 1.75239571068128e-06, + "loss": 0.5303, + "step": 11216 + }, + { + "epoch": 0.4628620945778658, + "grad_norm": 5.3597118187900685, + "learning_rate": 1.7521980908091847e-06, + "loss": 0.5496, + "step": 11217 + }, + { + "epoch": 0.46290335891722373, + "grad_norm": 3.3091178958307483, + "learning_rate": 1.7520004664322004e-06, + "loss": 0.53, + "step": 11218 + }, + { + "epoch": 0.46294462325658164, + "grad_norm": 2.8634324346885345, + "learning_rate": 1.7518028375538578e-06, + "loss": 0.4987, + "step": 11219 + }, + { + "epoch": 0.4629858875959396, + "grad_norm": 7.295075206298909, + "learning_rate": 1.7516052041776865e-06, + "loss": 0.5464, + "step": 11220 + }, + { + "epoch": 0.46302715193529753, + 
"grad_norm": 2.3986007537181053, + "learning_rate": 1.7514075663072167e-06, + "loss": 0.5457, + "step": 11221 + }, + { + "epoch": 0.46306841627465545, + "grad_norm": 4.116512184699632, + "learning_rate": 1.7512099239459786e-06, + "loss": 0.4917, + "step": 11222 + }, + { + "epoch": 0.46310968061401336, + "grad_norm": 3.0174802331378525, + "learning_rate": 1.7510122770975037e-06, + "loss": 0.5488, + "step": 11223 + }, + { + "epoch": 0.4631509449533713, + "grad_norm": 4.791731102267556, + "learning_rate": 1.7508146257653206e-06, + "loss": 0.5094, + "step": 11224 + }, + { + "epoch": 0.46319220929272925, + "grad_norm": 7.348542390539868, + "learning_rate": 1.7506169699529613e-06, + "loss": 0.4939, + "step": 11225 + }, + { + "epoch": 0.46323347363208717, + "grad_norm": 5.806760206464564, + "learning_rate": 1.750419309663956e-06, + "loss": 0.5503, + "step": 11226 + }, + { + "epoch": 0.4632747379714451, + "grad_norm": 3.797822360351718, + "learning_rate": 1.7502216449018353e-06, + "loss": 0.4772, + "step": 11227 + }, + { + "epoch": 0.463316002310803, + "grad_norm": 28.507356322257948, + "learning_rate": 1.7500239756701297e-06, + "loss": 0.5804, + "step": 11228 + }, + { + "epoch": 0.4633572666501609, + "grad_norm": 5.623379415236086, + "learning_rate": 1.7498263019723713e-06, + "loss": 0.508, + "step": 11229 + }, + { + "epoch": 0.46339853098951883, + "grad_norm": 3.1197939404937127, + "learning_rate": 1.7496286238120894e-06, + "loss": 0.5413, + "step": 11230 + }, + { + "epoch": 0.4634397953288768, + "grad_norm": 2.9389541142214726, + "learning_rate": 1.7494309411928165e-06, + "loss": 0.5456, + "step": 11231 + }, + { + "epoch": 0.4634810596682347, + "grad_norm": 3.9229120416885412, + "learning_rate": 1.7492332541180822e-06, + "loss": 0.5033, + "step": 11232 + }, + { + "epoch": 0.46352232400759263, + "grad_norm": 2.6470300176690733, + "learning_rate": 1.7490355625914189e-06, + "loss": 0.5464, + "step": 11233 + }, + { + "epoch": 0.46356358834695055, + "grad_norm": 3.4596015290573954, + "learning_rate": 1.748837866616357e-06, + "loss": 0.5768, + "step": 11234 + }, + { + "epoch": 0.46360485268630847, + "grad_norm": 4.39716446754034, + "learning_rate": 1.7486401661964292e-06, + "loss": 0.5096, + "step": 11235 + }, + { + "epoch": 0.46364611702566644, + "grad_norm": 2.9036479361817045, + "learning_rate": 1.7484424613351657e-06, + "loss": 0.529, + "step": 11236 + }, + { + "epoch": 0.46368738136502435, + "grad_norm": 3.508353516197661, + "learning_rate": 1.748244752036098e-06, + "loss": 0.5445, + "step": 11237 + }, + { + "epoch": 0.46372864570438227, + "grad_norm": 2.7984724081855576, + "learning_rate": 1.7480470383027582e-06, + "loss": 0.5663, + "step": 11238 + }, + { + "epoch": 0.4637699100437402, + "grad_norm": 2.901278503127336, + "learning_rate": 1.7478493201386775e-06, + "loss": 0.5543, + "step": 11239 + }, + { + "epoch": 0.4638111743830981, + "grad_norm": 2.0696387888466634, + "learning_rate": 1.7476515975473886e-06, + "loss": 0.5099, + "step": 11240 + }, + { + "epoch": 0.4638524387224561, + "grad_norm": 5.113819402231415, + "learning_rate": 1.7474538705324224e-06, + "loss": 0.5811, + "step": 11241 + }, + { + "epoch": 0.463893703061814, + "grad_norm": 4.715597427112598, + "learning_rate": 1.747256139097311e-06, + "loss": 0.5358, + "step": 11242 + }, + { + "epoch": 0.4639349674011719, + "grad_norm": 2.8496994509958222, + "learning_rate": 1.747058403245586e-06, + "loss": 0.5577, + "step": 11243 + }, + { + "epoch": 0.4639762317405298, + "grad_norm": 2.14833046850968, + "learning_rate": 
1.7468606629807803e-06, + "loss": 0.5107, + "step": 11244 + }, + { + "epoch": 0.46401749607988774, + "grad_norm": 8.869483880079525, + "learning_rate": 1.746662918306425e-06, + "loss": 0.5454, + "step": 11245 + }, + { + "epoch": 0.4640587604192457, + "grad_norm": 2.4794030676119814, + "learning_rate": 1.7464651692260537e-06, + "loss": 0.5371, + "step": 11246 + }, + { + "epoch": 0.4641000247586036, + "grad_norm": 3.415467210145503, + "learning_rate": 1.7462674157431975e-06, + "loss": 0.5406, + "step": 11247 + }, + { + "epoch": 0.46414128909796154, + "grad_norm": 5.244650685122435, + "learning_rate": 1.7460696578613894e-06, + "loss": 0.4921, + "step": 11248 + }, + { + "epoch": 0.46418255343731946, + "grad_norm": 4.060858481174489, + "learning_rate": 1.7458718955841617e-06, + "loss": 0.5351, + "step": 11249 + }, + { + "epoch": 0.46422381777667737, + "grad_norm": 3.6820318446018208, + "learning_rate": 1.7456741289150465e-06, + "loss": 0.516, + "step": 11250 + }, + { + "epoch": 0.46426508211603534, + "grad_norm": 6.769454272223522, + "learning_rate": 1.7454763578575772e-06, + "loss": 0.5634, + "step": 11251 + }, + { + "epoch": 0.46430634645539326, + "grad_norm": 3.43757383589389, + "learning_rate": 1.745278582415286e-06, + "loss": 0.614, + "step": 11252 + }, + { + "epoch": 0.4643476107947512, + "grad_norm": 4.328075023630992, + "learning_rate": 1.7450808025917053e-06, + "loss": 0.4923, + "step": 11253 + }, + { + "epoch": 0.4643888751341091, + "grad_norm": 2.3759637512853096, + "learning_rate": 1.7448830183903685e-06, + "loss": 0.5628, + "step": 11254 + }, + { + "epoch": 0.464430139473467, + "grad_norm": 14.883404851651457, + "learning_rate": 1.7446852298148085e-06, + "loss": 0.5466, + "step": 11255 + }, + { + "epoch": 0.464471403812825, + "grad_norm": 2.2382163043615435, + "learning_rate": 1.7444874368685585e-06, + "loss": 0.5391, + "step": 11256 + }, + { + "epoch": 0.4645126681521829, + "grad_norm": 4.449187959498618, + "learning_rate": 1.744289639555151e-06, + "loss": 0.4982, + "step": 11257 + }, + { + "epoch": 0.4645539324915408, + "grad_norm": 4.104567099477572, + "learning_rate": 1.7440918378781194e-06, + "loss": 0.5598, + "step": 11258 + }, + { + "epoch": 0.4645951968308987, + "grad_norm": 2.627126968636976, + "learning_rate": 1.7438940318409968e-06, + "loss": 0.5121, + "step": 11259 + }, + { + "epoch": 0.46463646117025664, + "grad_norm": 2.494828955053867, + "learning_rate": 1.7436962214473167e-06, + "loss": 0.5319, + "step": 11260 + }, + { + "epoch": 0.4646777255096146, + "grad_norm": 2.9137111546274626, + "learning_rate": 1.7434984067006123e-06, + "loss": 0.5388, + "step": 11261 + }, + { + "epoch": 0.46471898984897253, + "grad_norm": 2.986955545740825, + "learning_rate": 1.7433005876044184e-06, + "loss": 0.5234, + "step": 11262 + }, + { + "epoch": 0.46476025418833045, + "grad_norm": 9.102479090913297, + "learning_rate": 1.743102764162266e-06, + "loss": 0.5848, + "step": 11263 + }, + { + "epoch": 0.46480151852768836, + "grad_norm": 4.196418261555508, + "learning_rate": 1.7429049363776905e-06, + "loss": 0.5456, + "step": 11264 + }, + { + "epoch": 0.4648427828670463, + "grad_norm": 4.704030208764191, + "learning_rate": 1.7427071042542253e-06, + "loss": 0.5286, + "step": 11265 + }, + { + "epoch": 0.46488404720640425, + "grad_norm": 4.920875504940074, + "learning_rate": 1.7425092677954038e-06, + "loss": 0.517, + "step": 11266 + }, + { + "epoch": 0.46492531154576217, + "grad_norm": 2.476095523415838, + "learning_rate": 1.7423114270047602e-06, + "loss": 0.5568, + "step": 11267 + }, + { + 
"epoch": 0.4649665758851201, + "grad_norm": 6.609977147165465, + "learning_rate": 1.7421135818858288e-06, + "loss": 0.5515, + "step": 11268 + }, + { + "epoch": 0.465007840224478, + "grad_norm": 3.328748762470863, + "learning_rate": 1.7419157324421428e-06, + "loss": 0.5143, + "step": 11269 + }, + { + "epoch": 0.4650491045638359, + "grad_norm": 6.125507453663761, + "learning_rate": 1.7417178786772363e-06, + "loss": 0.4737, + "step": 11270 + }, + { + "epoch": 0.4650903689031939, + "grad_norm": 2.6163530096672836, + "learning_rate": 1.7415200205946442e-06, + "loss": 0.5306, + "step": 11271 + }, + { + "epoch": 0.4651316332425518, + "grad_norm": 2.524702541510005, + "learning_rate": 1.7413221581979002e-06, + "loss": 0.5339, + "step": 11272 + }, + { + "epoch": 0.4651728975819097, + "grad_norm": 28.24329344074094, + "learning_rate": 1.7411242914905389e-06, + "loss": 0.4713, + "step": 11273 + }, + { + "epoch": 0.46521416192126763, + "grad_norm": 3.6499606153802207, + "learning_rate": 1.7409264204760944e-06, + "loss": 0.5049, + "step": 11274 + }, + { + "epoch": 0.46525542626062555, + "grad_norm": 1.9885007811055304, + "learning_rate": 1.7407285451581015e-06, + "loss": 0.4935, + "step": 11275 + }, + { + "epoch": 0.4652966905999835, + "grad_norm": 3.106605677757373, + "learning_rate": 1.7405306655400941e-06, + "loss": 0.5653, + "step": 11276 + }, + { + "epoch": 0.46533795493934144, + "grad_norm": 12.764800537349535, + "learning_rate": 1.7403327816256078e-06, + "loss": 0.5272, + "step": 11277 + }, + { + "epoch": 0.46537921927869935, + "grad_norm": 2.2160319028177202, + "learning_rate": 1.7401348934181765e-06, + "loss": 0.5493, + "step": 11278 + }, + { + "epoch": 0.46542048361805727, + "grad_norm": 3.019586703162635, + "learning_rate": 1.7399370009213358e-06, + "loss": 0.5222, + "step": 11279 + }, + { + "epoch": 0.4654617479574152, + "grad_norm": 3.3933613600597963, + "learning_rate": 1.7397391041386195e-06, + "loss": 0.5937, + "step": 11280 + }, + { + "epoch": 0.46550301229677316, + "grad_norm": 6.098837284019227, + "learning_rate": 1.7395412030735634e-06, + "loss": 0.5304, + "step": 11281 + }, + { + "epoch": 0.46554427663613107, + "grad_norm": 3.6297051307937545, + "learning_rate": 1.739343297729702e-06, + "loss": 0.5136, + "step": 11282 + }, + { + "epoch": 0.465585540975489, + "grad_norm": 3.3994253915144603, + "learning_rate": 1.7391453881105707e-06, + "loss": 0.5268, + "step": 11283 + }, + { + "epoch": 0.4656268053148469, + "grad_norm": 7.377815029835062, + "learning_rate": 1.738947474219704e-06, + "loss": 0.5352, + "step": 11284 + }, + { + "epoch": 0.4656680696542048, + "grad_norm": 2.789763016390185, + "learning_rate": 1.7387495560606382e-06, + "loss": 0.5196, + "step": 11285 + }, + { + "epoch": 0.4657093339935628, + "grad_norm": 3.058059806422684, + "learning_rate": 1.7385516336369077e-06, + "loss": 0.477, + "step": 11286 + }, + { + "epoch": 0.4657505983329207, + "grad_norm": 2.860706264218761, + "learning_rate": 1.7383537069520487e-06, + "loss": 0.5849, + "step": 11287 + }, + { + "epoch": 0.4657918626722786, + "grad_norm": 4.6537742162880384, + "learning_rate": 1.738155776009596e-06, + "loss": 0.5408, + "step": 11288 + }, + { + "epoch": 0.46583312701163654, + "grad_norm": 5.083244784701265, + "learning_rate": 1.7379578408130856e-06, + "loss": 0.5586, + "step": 11289 + }, + { + "epoch": 0.46587439135099445, + "grad_norm": 2.1517101174149227, + "learning_rate": 1.7377599013660523e-06, + "loss": 0.5895, + "step": 11290 + }, + { + "epoch": 0.46591565569035237, + "grad_norm": 4.570457839286889, + 
"learning_rate": 1.7375619576720328e-06, + "loss": 0.4791, + "step": 11291 + }, + { + "epoch": 0.46595692002971034, + "grad_norm": 2.0702037221698952, + "learning_rate": 1.7373640097345623e-06, + "loss": 0.5482, + "step": 11292 + }, + { + "epoch": 0.46599818436906826, + "grad_norm": 3.132681916938624, + "learning_rate": 1.7371660575571769e-06, + "loss": 0.5518, + "step": 11293 + }, + { + "epoch": 0.4660394487084262, + "grad_norm": 3.1250442800948104, + "learning_rate": 1.736968101143412e-06, + "loss": 0.5734, + "step": 11294 + }, + { + "epoch": 0.4660807130477841, + "grad_norm": 3.52831900282003, + "learning_rate": 1.7367701404968049e-06, + "loss": 0.542, + "step": 11295 + }, + { + "epoch": 0.466121977387142, + "grad_norm": 8.831125520503926, + "learning_rate": 1.7365721756208902e-06, + "loss": 0.4987, + "step": 11296 + }, + { + "epoch": 0.4661632417265, + "grad_norm": 2.6135298886790803, + "learning_rate": 1.7363742065192048e-06, + "loss": 0.5572, + "step": 11297 + }, + { + "epoch": 0.4662045060658579, + "grad_norm": 3.922167340458305, + "learning_rate": 1.7361762331952844e-06, + "loss": 0.599, + "step": 11298 + }, + { + "epoch": 0.4662457704052158, + "grad_norm": 2.680536031441359, + "learning_rate": 1.735978255652666e-06, + "loss": 0.5356, + "step": 11299 + }, + { + "epoch": 0.4662870347445737, + "grad_norm": 4.652311080315025, + "learning_rate": 1.7357802738948853e-06, + "loss": 0.6037, + "step": 11300 + }, + { + "epoch": 0.46632829908393164, + "grad_norm": 3.0655568016495574, + "learning_rate": 1.7355822879254794e-06, + "loss": 0.525, + "step": 11301 + }, + { + "epoch": 0.4663695634232896, + "grad_norm": 10.931739541910511, + "learning_rate": 1.7353842977479846e-06, + "loss": 0.4897, + "step": 11302 + }, + { + "epoch": 0.46641082776264753, + "grad_norm": 5.7708096987018696, + "learning_rate": 1.7351863033659372e-06, + "loss": 0.4999, + "step": 11303 + }, + { + "epoch": 0.46645209210200544, + "grad_norm": 2.0116191868870983, + "learning_rate": 1.734988304782874e-06, + "loss": 0.5185, + "step": 11304 + }, + { + "epoch": 0.46649335644136336, + "grad_norm": 2.8537852322493475, + "learning_rate": 1.734790302002332e-06, + "loss": 0.5189, + "step": 11305 + }, + { + "epoch": 0.4665346207807213, + "grad_norm": 4.511180524958418, + "learning_rate": 1.7345922950278475e-06, + "loss": 0.5331, + "step": 11306 + }, + { + "epoch": 0.46657588512007925, + "grad_norm": 3.42305518713975, + "learning_rate": 1.7343942838629582e-06, + "loss": 0.5279, + "step": 11307 + }, + { + "epoch": 0.46661714945943716, + "grad_norm": 3.7818643785290966, + "learning_rate": 1.7341962685112005e-06, + "loss": 0.5817, + "step": 11308 + }, + { + "epoch": 0.4666584137987951, + "grad_norm": 10.362224730944142, + "learning_rate": 1.7339982489761117e-06, + "loss": 0.551, + "step": 11309 + }, + { + "epoch": 0.466699678138153, + "grad_norm": 7.967233029377257, + "learning_rate": 1.7338002252612282e-06, + "loss": 0.5058, + "step": 11310 + }, + { + "epoch": 0.4667409424775109, + "grad_norm": 2.659270878948051, + "learning_rate": 1.7336021973700884e-06, + "loss": 0.5275, + "step": 11311 + }, + { + "epoch": 0.4667822068168689, + "grad_norm": 2.4733687572390286, + "learning_rate": 1.7334041653062286e-06, + "loss": 0.5298, + "step": 11312 + }, + { + "epoch": 0.4668234711562268, + "grad_norm": 5.389073117567779, + "learning_rate": 1.733206129073187e-06, + "loss": 0.5228, + "step": 11313 + }, + { + "epoch": 0.4668647354955847, + "grad_norm": 4.095564476010608, + "learning_rate": 1.7330080886744998e-06, + "loss": 0.5552, + "step": 11314 + 
}, + { + "epoch": 0.46690599983494263, + "grad_norm": 3.383068079690536, + "learning_rate": 1.7328100441137059e-06, + "loss": 0.5523, + "step": 11315 + }, + { + "epoch": 0.46694726417430055, + "grad_norm": 30.275919578617916, + "learning_rate": 1.7326119953943423e-06, + "loss": 0.5856, + "step": 11316 + }, + { + "epoch": 0.4669885285136585, + "grad_norm": 8.933775216203264, + "learning_rate": 1.7324139425199458e-06, + "loss": 0.503, + "step": 11317 + }, + { + "epoch": 0.46702979285301643, + "grad_norm": 2.283604785379525, + "learning_rate": 1.7322158854940557e-06, + "loss": 0.4848, + "step": 11318 + }, + { + "epoch": 0.46707105719237435, + "grad_norm": 3.3137748884631604, + "learning_rate": 1.7320178243202087e-06, + "loss": 0.5299, + "step": 11319 + }, + { + "epoch": 0.46711232153173227, + "grad_norm": 4.645848569877614, + "learning_rate": 1.7318197590019425e-06, + "loss": 0.5621, + "step": 11320 + }, + { + "epoch": 0.4671535858710902, + "grad_norm": 5.247562260532427, + "learning_rate": 1.731621689542796e-06, + "loss": 0.5678, + "step": 11321 + }, + { + "epoch": 0.46719485021044815, + "grad_norm": 3.5367617676016136, + "learning_rate": 1.7314236159463063e-06, + "loss": 0.5261, + "step": 11322 + }, + { + "epoch": 0.46723611454980607, + "grad_norm": 3.3502174189354905, + "learning_rate": 1.7312255382160122e-06, + "loss": 0.5277, + "step": 11323 + }, + { + "epoch": 0.467277378889164, + "grad_norm": 3.835742107288274, + "learning_rate": 1.7310274563554515e-06, + "loss": 0.5376, + "step": 11324 + }, + { + "epoch": 0.4673186432285219, + "grad_norm": 6.463506837366099, + "learning_rate": 1.730829370368162e-06, + "loss": 0.6123, + "step": 11325 + }, + { + "epoch": 0.4673599075678798, + "grad_norm": 2.2922030707853485, + "learning_rate": 1.7306312802576833e-06, + "loss": 0.5587, + "step": 11326 + }, + { + "epoch": 0.4674011719072378, + "grad_norm": 2.591809578821069, + "learning_rate": 1.7304331860275524e-06, + "loss": 0.4613, + "step": 11327 + }, + { + "epoch": 0.4674424362465957, + "grad_norm": 22.486072068550385, + "learning_rate": 1.7302350876813087e-06, + "loss": 0.5012, + "step": 11328 + }, + { + "epoch": 0.4674837005859536, + "grad_norm": 1.9533823160023118, + "learning_rate": 1.7300369852224903e-06, + "loss": 0.5115, + "step": 11329 + }, + { + "epoch": 0.46752496492531154, + "grad_norm": 3.079535505061202, + "learning_rate": 1.7298388786546357e-06, + "loss": 0.5489, + "step": 11330 + }, + { + "epoch": 0.46756622926466945, + "grad_norm": 2.754381724381136, + "learning_rate": 1.729640767981284e-06, + "loss": 0.5354, + "step": 11331 + }, + { + "epoch": 0.4676074936040274, + "grad_norm": 2.935774139349548, + "learning_rate": 1.7294426532059736e-06, + "loss": 0.5415, + "step": 11332 + }, + { + "epoch": 0.46764875794338534, + "grad_norm": 2.9264390676162404, + "learning_rate": 1.729244534332243e-06, + "loss": 0.5407, + "step": 11333 + }, + { + "epoch": 0.46769002228274326, + "grad_norm": 1.9004239695042326, + "learning_rate": 1.7290464113636323e-06, + "loss": 0.5551, + "step": 11334 + }, + { + "epoch": 0.46773128662210117, + "grad_norm": 3.5693952984811164, + "learning_rate": 1.7288482843036794e-06, + "loss": 0.5225, + "step": 11335 + }, + { + "epoch": 0.4677725509614591, + "grad_norm": 2.361313230390156, + "learning_rate": 1.7286501531559237e-06, + "loss": 0.5214, + "step": 11336 + }, + { + "epoch": 0.46781381530081706, + "grad_norm": 4.16537758052169, + "learning_rate": 1.7284520179239039e-06, + "loss": 0.5795, + "step": 11337 + }, + { + "epoch": 0.467855079640175, + "grad_norm": 
5.70525374355958, + "learning_rate": 1.7282538786111601e-06, + "loss": 0.5277, + "step": 11338 + }, + { + "epoch": 0.4678963439795329, + "grad_norm": 6.0330193333228355, + "learning_rate": 1.7280557352212305e-06, + "loss": 0.4989, + "step": 11339 + }, + { + "epoch": 0.4679376083188908, + "grad_norm": 10.489926863750958, + "learning_rate": 1.7278575877576555e-06, + "loss": 0.5194, + "step": 11340 + }, + { + "epoch": 0.4679788726582487, + "grad_norm": 3.7595449201739, + "learning_rate": 1.7276594362239735e-06, + "loss": 0.5246, + "step": 11341 + }, + { + "epoch": 0.4680201369976067, + "grad_norm": 2.6481629098038484, + "learning_rate": 1.7274612806237247e-06, + "loss": 0.4973, + "step": 11342 + }, + { + "epoch": 0.4680614013369646, + "grad_norm": 2.135266555469265, + "learning_rate": 1.7272631209604481e-06, + "loss": 0.5601, + "step": 11343 + }, + { + "epoch": 0.4681026656763225, + "grad_norm": 4.662011682227222, + "learning_rate": 1.727064957237684e-06, + "loss": 0.4935, + "step": 11344 + }, + { + "epoch": 0.46814393001568044, + "grad_norm": 3.7673781824668224, + "learning_rate": 1.7268667894589714e-06, + "loss": 0.4579, + "step": 11345 + }, + { + "epoch": 0.46818519435503836, + "grad_norm": 3.5686526587540808, + "learning_rate": 1.7266686176278508e-06, + "loss": 0.5464, + "step": 11346 + }, + { + "epoch": 0.46822645869439633, + "grad_norm": 2.806640074817315, + "learning_rate": 1.7264704417478614e-06, + "loss": 0.5041, + "step": 11347 + }, + { + "epoch": 0.46826772303375425, + "grad_norm": 3.0664385087967703, + "learning_rate": 1.726272261822544e-06, + "loss": 0.4766, + "step": 11348 + }, + { + "epoch": 0.46830898737311216, + "grad_norm": 5.394455329087415, + "learning_rate": 1.7260740778554372e-06, + "loss": 0.5002, + "step": 11349 + }, + { + "epoch": 0.4683502517124701, + "grad_norm": 2.3758537567401943, + "learning_rate": 1.725875889850082e-06, + "loss": 0.5961, + "step": 11350 + }, + { + "epoch": 0.468391516051828, + "grad_norm": 3.0488685892474483, + "learning_rate": 1.7256776978100184e-06, + "loss": 0.5236, + "step": 11351 + }, + { + "epoch": 0.4684327803911859, + "grad_norm": 5.066788239850999, + "learning_rate": 1.7254795017387865e-06, + "loss": 0.5217, + "step": 11352 + }, + { + "epoch": 0.4684740447305439, + "grad_norm": 9.024912031504487, + "learning_rate": 1.7252813016399263e-06, + "loss": 0.5135, + "step": 11353 + }, + { + "epoch": 0.4685153090699018, + "grad_norm": 26.79983898629672, + "learning_rate": 1.725083097516979e-06, + "loss": 0.5864, + "step": 11354 + }, + { + "epoch": 0.4685565734092597, + "grad_norm": 3.449370000217046, + "learning_rate": 1.7248848893734845e-06, + "loss": 0.5318, + "step": 11355 + }, + { + "epoch": 0.46859783774861763, + "grad_norm": 4.069027552058761, + "learning_rate": 1.724686677212983e-06, + "loss": 0.5102, + "step": 11356 + }, + { + "epoch": 0.46863910208797555, + "grad_norm": 3.1403174981593933, + "learning_rate": 1.7244884610390155e-06, + "loss": 0.5378, + "step": 11357 + }, + { + "epoch": 0.4686803664273335, + "grad_norm": 2.2991824568942634, + "learning_rate": 1.7242902408551229e-06, + "loss": 0.5632, + "step": 11358 + }, + { + "epoch": 0.46872163076669143, + "grad_norm": 3.6884996623380606, + "learning_rate": 1.7240920166648448e-06, + "loss": 0.5493, + "step": 11359 + }, + { + "epoch": 0.46876289510604935, + "grad_norm": 4.800104874130227, + "learning_rate": 1.7238937884717233e-06, + "loss": 0.5262, + "step": 11360 + }, + { + "epoch": 0.46880415944540726, + "grad_norm": 2.6155775091118914, + "learning_rate": 1.7236955562792984e-06, + 
"loss": 0.5598, + "step": 11361 + }, + { + "epoch": 0.4688454237847652, + "grad_norm": 2.177615730215345, + "learning_rate": 1.723497320091111e-06, + "loss": 0.5448, + "step": 11362 + }, + { + "epoch": 0.46888668812412315, + "grad_norm": 7.6611979061226885, + "learning_rate": 1.7232990799107027e-06, + "loss": 0.5577, + "step": 11363 + }, + { + "epoch": 0.46892795246348107, + "grad_norm": 2.901396432516185, + "learning_rate": 1.723100835741614e-06, + "loss": 0.5165, + "step": 11364 + }, + { + "epoch": 0.468969216802839, + "grad_norm": 2.312273313239718, + "learning_rate": 1.7229025875873864e-06, + "loss": 0.5453, + "step": 11365 + }, + { + "epoch": 0.4690104811421969, + "grad_norm": 2.610995523327883, + "learning_rate": 1.7227043354515607e-06, + "loss": 0.5405, + "step": 11366 + }, + { + "epoch": 0.4690517454815548, + "grad_norm": 3.324185955329877, + "learning_rate": 1.7225060793376792e-06, + "loss": 0.5262, + "step": 11367 + }, + { + "epoch": 0.4690930098209128, + "grad_norm": 3.8525174198144256, + "learning_rate": 1.7223078192492817e-06, + "loss": 0.5758, + "step": 11368 + }, + { + "epoch": 0.4691342741602707, + "grad_norm": 3.6053400627014343, + "learning_rate": 1.722109555189911e-06, + "loss": 0.5566, + "step": 11369 + }, + { + "epoch": 0.4691755384996286, + "grad_norm": 2.41526195193815, + "learning_rate": 1.7219112871631074e-06, + "loss": 0.4821, + "step": 11370 + }, + { + "epoch": 0.46921680283898654, + "grad_norm": 3.788037381983034, + "learning_rate": 1.7217130151724134e-06, + "loss": 0.5433, + "step": 11371 + }, + { + "epoch": 0.46925806717834445, + "grad_norm": 5.265591210350445, + "learning_rate": 1.7215147392213703e-06, + "loss": 0.5597, + "step": 11372 + }, + { + "epoch": 0.4692993315177024, + "grad_norm": 4.519403397030046, + "learning_rate": 1.7213164593135197e-06, + "loss": 0.4482, + "step": 11373 + }, + { + "epoch": 0.46934059585706034, + "grad_norm": 3.255501516916439, + "learning_rate": 1.7211181754524038e-06, + "loss": 0.5281, + "step": 11374 + }, + { + "epoch": 0.46938186019641825, + "grad_norm": 6.860032000765248, + "learning_rate": 1.7209198876415643e-06, + "loss": 0.5626, + "step": 11375 + }, + { + "epoch": 0.46942312453577617, + "grad_norm": 3.856907298363175, + "learning_rate": 1.7207215958845423e-06, + "loss": 0.5034, + "step": 11376 + }, + { + "epoch": 0.4694643888751341, + "grad_norm": 3.726304768376544, + "learning_rate": 1.7205233001848805e-06, + "loss": 0.5339, + "step": 11377 + }, + { + "epoch": 0.46950565321449206, + "grad_norm": 3.166559368987182, + "learning_rate": 1.720325000546121e-06, + "loss": 0.5709, + "step": 11378 + }, + { + "epoch": 0.46954691755385, + "grad_norm": 3.6248555080132654, + "learning_rate": 1.7201266969718063e-06, + "loss": 0.4665, + "step": 11379 + }, + { + "epoch": 0.4695881818932079, + "grad_norm": 3.8170033283239486, + "learning_rate": 1.7199283894654779e-06, + "loss": 0.5742, + "step": 11380 + }, + { + "epoch": 0.4696294462325658, + "grad_norm": 5.7051970058626384, + "learning_rate": 1.7197300780306783e-06, + "loss": 0.5077, + "step": 11381 + }, + { + "epoch": 0.4696707105719237, + "grad_norm": 2.6499565339230777, + "learning_rate": 1.7195317626709492e-06, + "loss": 0.5334, + "step": 11382 + }, + { + "epoch": 0.4697119749112817, + "grad_norm": 2.2693581914855185, + "learning_rate": 1.7193334433898343e-06, + "loss": 0.5072, + "step": 11383 + }, + { + "epoch": 0.4697532392506396, + "grad_norm": 5.766122384734104, + "learning_rate": 1.719135120190875e-06, + "loss": 0.5878, + "step": 11384 + }, + { + "epoch": 0.4697945035899975, + 
"grad_norm": 2.64270616391336, + "learning_rate": 1.7189367930776146e-06, + "loss": 0.5191, + "step": 11385 + }, + { + "epoch": 0.46983576792935544, + "grad_norm": 3.0017384846628086, + "learning_rate": 1.7187384620535949e-06, + "loss": 0.5368, + "step": 11386 + }, + { + "epoch": 0.46987703226871336, + "grad_norm": 3.0319792810283173, + "learning_rate": 1.7185401271223602e-06, + "loss": 0.5211, + "step": 11387 + }, + { + "epoch": 0.46991829660807133, + "grad_norm": 8.779960694782218, + "learning_rate": 1.7183417882874511e-06, + "loss": 0.4833, + "step": 11388 + }, + { + "epoch": 0.46995956094742924, + "grad_norm": 3.3005358101806985, + "learning_rate": 1.7181434455524117e-06, + "loss": 0.4825, + "step": 11389 + }, + { + "epoch": 0.47000082528678716, + "grad_norm": 4.1085028011667095, + "learning_rate": 1.7179450989207845e-06, + "loss": 0.5646, + "step": 11390 + }, + { + "epoch": 0.4700420896261451, + "grad_norm": 2.4377426690183253, + "learning_rate": 1.7177467483961129e-06, + "loss": 0.547, + "step": 11391 + }, + { + "epoch": 0.470083353965503, + "grad_norm": 2.4761511475166897, + "learning_rate": 1.7175483939819392e-06, + "loss": 0.5272, + "step": 11392 + }, + { + "epoch": 0.47012461830486096, + "grad_norm": 2.6058059312319233, + "learning_rate": 1.7173500356818075e-06, + "loss": 0.537, + "step": 11393 + }, + { + "epoch": 0.4701658826442189, + "grad_norm": 1.964980065665498, + "learning_rate": 1.7171516734992603e-06, + "loss": 0.5074, + "step": 11394 + }, + { + "epoch": 0.4702071469835768, + "grad_norm": 30.71946981906082, + "learning_rate": 1.7169533074378408e-06, + "loss": 0.5819, + "step": 11395 + }, + { + "epoch": 0.4702484113229347, + "grad_norm": 3.3242152934994036, + "learning_rate": 1.7167549375010928e-06, + "loss": 0.5306, + "step": 11396 + }, + { + "epoch": 0.47028967566229263, + "grad_norm": 10.070845161382037, + "learning_rate": 1.7165565636925592e-06, + "loss": 0.5688, + "step": 11397 + }, + { + "epoch": 0.4703309400016506, + "grad_norm": 2.3881731302183438, + "learning_rate": 1.7163581860157833e-06, + "loss": 0.5108, + "step": 11398 + }, + { + "epoch": 0.4703722043410085, + "grad_norm": 4.3170404096679835, + "learning_rate": 1.7161598044743097e-06, + "loss": 0.5173, + "step": 11399 + }, + { + "epoch": 0.47041346868036643, + "grad_norm": 2.626640801361141, + "learning_rate": 1.7159614190716809e-06, + "loss": 0.504, + "step": 11400 + }, + { + "epoch": 0.47045473301972435, + "grad_norm": 3.0669309524785318, + "learning_rate": 1.7157630298114404e-06, + "loss": 0.5202, + "step": 11401 + }, + { + "epoch": 0.47049599735908226, + "grad_norm": 2.813119553786519, + "learning_rate": 1.7155646366971329e-06, + "loss": 0.5321, + "step": 11402 + }, + { + "epoch": 0.47053726169844023, + "grad_norm": 6.533086975891843, + "learning_rate": 1.7153662397323016e-06, + "loss": 0.5443, + "step": 11403 + }, + { + "epoch": 0.47057852603779815, + "grad_norm": 2.206610942986875, + "learning_rate": 1.7151678389204907e-06, + "loss": 0.4787, + "step": 11404 + }, + { + "epoch": 0.47061979037715607, + "grad_norm": 2.120318336843778, + "learning_rate": 1.7149694342652435e-06, + "loss": 0.5218, + "step": 11405 + }, + { + "epoch": 0.470661054716514, + "grad_norm": 2.226189547743586, + "learning_rate": 1.714771025770105e-06, + "loss": 0.4859, + "step": 11406 + }, + { + "epoch": 0.4707023190558719, + "grad_norm": 1.945658931436898, + "learning_rate": 1.7145726134386184e-06, + "loss": 0.5348, + "step": 11407 + }, + { + "epoch": 0.47074358339522987, + "grad_norm": 2.549384939486404, + "learning_rate": 
1.7143741972743283e-06, + "loss": 0.5228, + "step": 11408 + }, + { + "epoch": 0.4707848477345878, + "grad_norm": 2.766831992351318, + "learning_rate": 1.7141757772807784e-06, + "loss": 0.4873, + "step": 11409 + }, + { + "epoch": 0.4708261120739457, + "grad_norm": 3.345698034383415, + "learning_rate": 1.7139773534615137e-06, + "loss": 0.5656, + "step": 11410 + }, + { + "epoch": 0.4708673764133036, + "grad_norm": 2.817628897191346, + "learning_rate": 1.713778925820078e-06, + "loss": 0.4545, + "step": 11411 + }, + { + "epoch": 0.47090864075266153, + "grad_norm": 2.153451393376396, + "learning_rate": 1.7135804943600159e-06, + "loss": 0.5835, + "step": 11412 + }, + { + "epoch": 0.47094990509201945, + "grad_norm": 2.550265472972399, + "learning_rate": 1.713382059084872e-06, + "loss": 0.5122, + "step": 11413 + }, + { + "epoch": 0.4709911694313774, + "grad_norm": 5.782178251234837, + "learning_rate": 1.713183619998191e-06, + "loss": 0.5024, + "step": 11414 + }, + { + "epoch": 0.47103243377073534, + "grad_norm": 2.6003007616817544, + "learning_rate": 1.7129851771035164e-06, + "loss": 0.5094, + "step": 11415 + }, + { + "epoch": 0.47107369811009325, + "grad_norm": 3.16256518551739, + "learning_rate": 1.7127867304043944e-06, + "loss": 0.4934, + "step": 11416 + }, + { + "epoch": 0.47111496244945117, + "grad_norm": 2.563257530730719, + "learning_rate": 1.7125882799043687e-06, + "loss": 0.5215, + "step": 11417 + }, + { + "epoch": 0.4711562267888091, + "grad_norm": 4.23733547920023, + "learning_rate": 1.7123898256069848e-06, + "loss": 0.5906, + "step": 11418 + }, + { + "epoch": 0.47119749112816706, + "grad_norm": 2.354900103306275, + "learning_rate": 1.712191367515787e-06, + "loss": 0.5302, + "step": 11419 + }, + { + "epoch": 0.471238755467525, + "grad_norm": 2.6722304420390355, + "learning_rate": 1.711992905634321e-06, + "loss": 0.5836, + "step": 11420 + }, + { + "epoch": 0.4712800198068829, + "grad_norm": 6.438199504952715, + "learning_rate": 1.711794439966131e-06, + "loss": 0.5145, + "step": 11421 + }, + { + "epoch": 0.4713212841462408, + "grad_norm": 2.022962793359227, + "learning_rate": 1.711595970514763e-06, + "loss": 0.4735, + "step": 11422 + }, + { + "epoch": 0.4713625484855987, + "grad_norm": 3.127173265592817, + "learning_rate": 1.7113974972837609e-06, + "loss": 0.5736, + "step": 11423 + }, + { + "epoch": 0.4714038128249567, + "grad_norm": 3.3062919921467735, + "learning_rate": 1.7111990202766709e-06, + "loss": 0.5334, + "step": 11424 + }, + { + "epoch": 0.4714450771643146, + "grad_norm": 2.6572763745155275, + "learning_rate": 1.7110005394970383e-06, + "loss": 0.5192, + "step": 11425 + }, + { + "epoch": 0.4714863415036725, + "grad_norm": 3.005155087926632, + "learning_rate": 1.7108020549484082e-06, + "loss": 0.5493, + "step": 11426 + }, + { + "epoch": 0.47152760584303044, + "grad_norm": 7.462224778805927, + "learning_rate": 1.7106035666343256e-06, + "loss": 0.5096, + "step": 11427 + }, + { + "epoch": 0.47156887018238836, + "grad_norm": 5.946020483891885, + "learning_rate": 1.710405074558337e-06, + "loss": 0.5366, + "step": 11428 + }, + { + "epoch": 0.4716101345217463, + "grad_norm": 3.9737135490041187, + "learning_rate": 1.710206578723987e-06, + "loss": 0.5452, + "step": 11429 + }, + { + "epoch": 0.47165139886110424, + "grad_norm": 3.269410385885349, + "learning_rate": 1.7100080791348221e-06, + "loss": 0.557, + "step": 11430 + }, + { + "epoch": 0.47169266320046216, + "grad_norm": 2.0627806445631305, + "learning_rate": 1.709809575794387e-06, + "loss": 0.5313, + "step": 11431 + }, + { + "epoch": 
0.4717339275398201, + "grad_norm": 3.4523864796454773, + "learning_rate": 1.7096110687062286e-06, + "loss": 0.5232, + "step": 11432 + }, + { + "epoch": 0.471775191879178, + "grad_norm": 4.703799894986229, + "learning_rate": 1.7094125578738917e-06, + "loss": 0.5346, + "step": 11433 + }, + { + "epoch": 0.47181645621853596, + "grad_norm": 1.8585017451587893, + "learning_rate": 1.7092140433009227e-06, + "loss": 0.5252, + "step": 11434 + }, + { + "epoch": 0.4718577205578939, + "grad_norm": 2.3807676272832996, + "learning_rate": 1.7090155249908679e-06, + "loss": 0.4885, + "step": 11435 + }, + { + "epoch": 0.4718989848972518, + "grad_norm": 3.5544253997885478, + "learning_rate": 1.7088170029472728e-06, + "loss": 0.5574, + "step": 11436 + }, + { + "epoch": 0.4719402492366097, + "grad_norm": 2.4391074939558886, + "learning_rate": 1.708618477173683e-06, + "loss": 0.5348, + "step": 11437 + }, + { + "epoch": 0.4719815135759676, + "grad_norm": 1.8670540099903006, + "learning_rate": 1.708419947673646e-06, + "loss": 0.4754, + "step": 11438 + }, + { + "epoch": 0.4720227779153256, + "grad_norm": 2.625540882444132, + "learning_rate": 1.708221414450707e-06, + "loss": 0.5172, + "step": 11439 + }, + { + "epoch": 0.4720640422546835, + "grad_norm": 2.7284739828566154, + "learning_rate": 1.7080228775084132e-06, + "loss": 0.5785, + "step": 11440 + }, + { + "epoch": 0.47210530659404143, + "grad_norm": 3.784572244793964, + "learning_rate": 1.70782433685031e-06, + "loss": 0.5687, + "step": 11441 + }, + { + "epoch": 0.47214657093339935, + "grad_norm": 6.187473825895653, + "learning_rate": 1.707625792479944e-06, + "loss": 0.5259, + "step": 11442 + }, + { + "epoch": 0.47218783527275726, + "grad_norm": 3.915510871326297, + "learning_rate": 1.7074272444008625e-06, + "loss": 0.5026, + "step": 11443 + }, + { + "epoch": 0.47222909961211523, + "grad_norm": 2.890906836925601, + "learning_rate": 1.7072286926166108e-06, + "loss": 0.5441, + "step": 11444 + }, + { + "epoch": 0.47227036395147315, + "grad_norm": 3.9510231963584594, + "learning_rate": 1.7070301371307369e-06, + "loss": 0.5187, + "step": 11445 + }, + { + "epoch": 0.47231162829083106, + "grad_norm": 2.8150200951093214, + "learning_rate": 1.7068315779467867e-06, + "loss": 0.533, + "step": 11446 + }, + { + "epoch": 0.472352892630189, + "grad_norm": 3.406582257427074, + "learning_rate": 1.7066330150683072e-06, + "loss": 0.5459, + "step": 11447 + }, + { + "epoch": 0.4723941569695469, + "grad_norm": 3.474134472620866, + "learning_rate": 1.7064344484988447e-06, + "loss": 0.5309, + "step": 11448 + }, + { + "epoch": 0.47243542130890487, + "grad_norm": 3.527043672736937, + "learning_rate": 1.7062358782419471e-06, + "loss": 0.5685, + "step": 11449 + }, + { + "epoch": 0.4724766856482628, + "grad_norm": 3.3893248590102734, + "learning_rate": 1.70603730430116e-06, + "loss": 0.4993, + "step": 11450 + }, + { + "epoch": 0.4725179499876207, + "grad_norm": 3.108944048790983, + "learning_rate": 1.7058387266800318e-06, + "loss": 0.5389, + "step": 11451 + }, + { + "epoch": 0.4725592143269786, + "grad_norm": 3.381020374287155, + "learning_rate": 1.7056401453821087e-06, + "loss": 0.5827, + "step": 11452 + }, + { + "epoch": 0.47260047866633653, + "grad_norm": 2.5135711784998693, + "learning_rate": 1.7054415604109389e-06, + "loss": 0.5398, + "step": 11453 + }, + { + "epoch": 0.4726417430056945, + "grad_norm": 7.6177105056698045, + "learning_rate": 1.7052429717700685e-06, + "loss": 0.5108, + "step": 11454 + }, + { + "epoch": 0.4726830073450524, + "grad_norm": 3.1395756147547313, + 
"learning_rate": 1.7050443794630449e-06, + "loss": 0.5281, + "step": 11455 + }, + { + "epoch": 0.47272427168441034, + "grad_norm": 2.7570330186335936, + "learning_rate": 1.7048457834934157e-06, + "loss": 0.4668, + "step": 11456 + }, + { + "epoch": 0.47276553602376825, + "grad_norm": 3.86275570478596, + "learning_rate": 1.7046471838647288e-06, + "loss": 0.5288, + "step": 11457 + }, + { + "epoch": 0.47280680036312617, + "grad_norm": 3.5526090403449975, + "learning_rate": 1.7044485805805305e-06, + "loss": 0.5113, + "step": 11458 + }, + { + "epoch": 0.47284806470248414, + "grad_norm": 4.618607680222903, + "learning_rate": 1.7042499736443702e-06, + "loss": 0.5048, + "step": 11459 + }, + { + "epoch": 0.47288932904184205, + "grad_norm": 3.5758237274580753, + "learning_rate": 1.7040513630597935e-06, + "loss": 0.5522, + "step": 11460 + }, + { + "epoch": 0.47293059338119997, + "grad_norm": 2.0620340015507823, + "learning_rate": 1.7038527488303496e-06, + "loss": 0.5244, + "step": 11461 + }, + { + "epoch": 0.4729718577205579, + "grad_norm": 2.913703434493935, + "learning_rate": 1.703654130959585e-06, + "loss": 0.5575, + "step": 11462 + }, + { + "epoch": 0.4730131220599158, + "grad_norm": 3.2857453621125687, + "learning_rate": 1.7034555094510486e-06, + "loss": 0.4963, + "step": 11463 + }, + { + "epoch": 0.4730543863992738, + "grad_norm": 2.6548546317438326, + "learning_rate": 1.7032568843082877e-06, + "loss": 0.5593, + "step": 11464 + }, + { + "epoch": 0.4730956507386317, + "grad_norm": 4.548270941189884, + "learning_rate": 1.7030582555348508e-06, + "loss": 0.5942, + "step": 11465 + }, + { + "epoch": 0.4731369150779896, + "grad_norm": 10.154700474354035, + "learning_rate": 1.7028596231342852e-06, + "loss": 0.4896, + "step": 11466 + }, + { + "epoch": 0.4731781794173475, + "grad_norm": 3.6977631342177357, + "learning_rate": 1.7026609871101391e-06, + "loss": 0.5105, + "step": 11467 + }, + { + "epoch": 0.47321944375670544, + "grad_norm": 5.33607164129653, + "learning_rate": 1.7024623474659608e-06, + "loss": 0.4874, + "step": 11468 + }, + { + "epoch": 0.4732607080960634, + "grad_norm": 3.27731928480669, + "learning_rate": 1.7022637042052986e-06, + "loss": 0.5588, + "step": 11469 + }, + { + "epoch": 0.4733019724354213, + "grad_norm": 7.942966089202435, + "learning_rate": 1.7020650573317002e-06, + "loss": 0.5481, + "step": 11470 + }, + { + "epoch": 0.47334323677477924, + "grad_norm": 2.7638271440710236, + "learning_rate": 1.701866406848715e-06, + "loss": 0.51, + "step": 11471 + }, + { + "epoch": 0.47338450111413716, + "grad_norm": 2.432197896853232, + "learning_rate": 1.7016677527598903e-06, + "loss": 0.5443, + "step": 11472 + }, + { + "epoch": 0.4734257654534951, + "grad_norm": 3.828104700669514, + "learning_rate": 1.7014690950687756e-06, + "loss": 0.5015, + "step": 11473 + }, + { + "epoch": 0.473467029792853, + "grad_norm": 2.1784530046347625, + "learning_rate": 1.7012704337789187e-06, + "loss": 0.5125, + "step": 11474 + }, + { + "epoch": 0.47350829413221096, + "grad_norm": 4.2014369313112425, + "learning_rate": 1.7010717688938683e-06, + "loss": 0.5636, + "step": 11475 + }, + { + "epoch": 0.4735495584715689, + "grad_norm": 4.377254738686474, + "learning_rate": 1.7008731004171728e-06, + "loss": 0.5246, + "step": 11476 + }, + { + "epoch": 0.4735908228109268, + "grad_norm": 2.004361495778303, + "learning_rate": 1.7006744283523814e-06, + "loss": 0.4894, + "step": 11477 + }, + { + "epoch": 0.4736320871502847, + "grad_norm": 3.0908987358408657, + "learning_rate": 1.7004757527030426e-06, + "loss": 0.6072, + 
"step": 11478 + }, + { + "epoch": 0.4736733514896426, + "grad_norm": 2.969887884222885, + "learning_rate": 1.7002770734727056e-06, + "loss": 0.5618, + "step": 11479 + }, + { + "epoch": 0.4737146158290006, + "grad_norm": 5.4900739244275725, + "learning_rate": 1.7000783906649194e-06, + "loss": 0.554, + "step": 11480 + }, + { + "epoch": 0.4737558801683585, + "grad_norm": 2.5524350303297294, + "learning_rate": 1.6998797042832318e-06, + "loss": 0.5261, + "step": 11481 + }, + { + "epoch": 0.47379714450771643, + "grad_norm": 2.7531641394428936, + "learning_rate": 1.699681014331193e-06, + "loss": 0.5073, + "step": 11482 + }, + { + "epoch": 0.47383840884707434, + "grad_norm": 7.7791083726551955, + "learning_rate": 1.6994823208123516e-06, + "loss": 0.5305, + "step": 11483 + }, + { + "epoch": 0.47387967318643226, + "grad_norm": 3.9152119437148034, + "learning_rate": 1.6992836237302571e-06, + "loss": 0.5212, + "step": 11484 + }, + { + "epoch": 0.47392093752579023, + "grad_norm": 4.201293336910619, + "learning_rate": 1.6990849230884588e-06, + "loss": 0.4886, + "step": 11485 + }, + { + "epoch": 0.47396220186514815, + "grad_norm": 3.0884062450636893, + "learning_rate": 1.6988862188905056e-06, + "loss": 0.4533, + "step": 11486 + }, + { + "epoch": 0.47400346620450606, + "grad_norm": 3.989543601986451, + "learning_rate": 1.6986875111399466e-06, + "loss": 0.5471, + "step": 11487 + }, + { + "epoch": 0.474044730543864, + "grad_norm": 2.5411867155627923, + "learning_rate": 1.6984887998403321e-06, + "loss": 0.4819, + "step": 11488 + }, + { + "epoch": 0.4740859948832219, + "grad_norm": 2.817229754934732, + "learning_rate": 1.6982900849952107e-06, + "loss": 0.513, + "step": 11489 + }, + { + "epoch": 0.47412725922257987, + "grad_norm": 2.2677201560613853, + "learning_rate": 1.6980913666081327e-06, + "loss": 0.5398, + "step": 11490 + }, + { + "epoch": 0.4741685235619378, + "grad_norm": 5.748371962447084, + "learning_rate": 1.697892644682647e-06, + "loss": 0.533, + "step": 11491 + }, + { + "epoch": 0.4742097879012957, + "grad_norm": 3.308461960153597, + "learning_rate": 1.6976939192223041e-06, + "loss": 0.463, + "step": 11492 + }, + { + "epoch": 0.4742510522406536, + "grad_norm": 2.2644318228534397, + "learning_rate": 1.6974951902306528e-06, + "loss": 0.5275, + "step": 11493 + }, + { + "epoch": 0.47429231658001153, + "grad_norm": 4.019102709800903, + "learning_rate": 1.6972964577112436e-06, + "loss": 0.5452, + "step": 11494 + }, + { + "epoch": 0.4743335809193695, + "grad_norm": 5.4632357086933725, + "learning_rate": 1.697097721667626e-06, + "loss": 0.4689, + "step": 11495 + }, + { + "epoch": 0.4743748452587274, + "grad_norm": 3.042491313049562, + "learning_rate": 1.69689898210335e-06, + "loss": 0.5788, + "step": 11496 + }, + { + "epoch": 0.47441610959808533, + "grad_norm": 2.157341432251246, + "learning_rate": 1.6967002390219652e-06, + "loss": 0.554, + "step": 11497 + }, + { + "epoch": 0.47445737393744325, + "grad_norm": 2.545105430062033, + "learning_rate": 1.6965014924270227e-06, + "loss": 0.5695, + "step": 11498 + }, + { + "epoch": 0.47449863827680117, + "grad_norm": 20.474461645731996, + "learning_rate": 1.6963027423220717e-06, + "loss": 0.5709, + "step": 11499 + }, + { + "epoch": 0.47453990261615914, + "grad_norm": 7.287741968299576, + "learning_rate": 1.6961039887106632e-06, + "loss": 0.5208, + "step": 11500 + }, + { + "epoch": 0.47458116695551705, + "grad_norm": 2.033493554581963, + "learning_rate": 1.6959052315963459e-06, + "loss": 0.4798, + "step": 11501 + }, + { + "epoch": 0.47462243129487497, + 
"grad_norm": 12.246440774935154, + "learning_rate": 1.6957064709826718e-06, + "loss": 0.5113, + "step": 11502 + }, + { + "epoch": 0.4746636956342329, + "grad_norm": 4.704094913741126, + "learning_rate": 1.69550770687319e-06, + "loss": 0.5345, + "step": 11503 + }, + { + "epoch": 0.4747049599735908, + "grad_norm": 6.358805528142307, + "learning_rate": 1.6953089392714523e-06, + "loss": 0.5466, + "step": 11504 + }, + { + "epoch": 0.4747462243129488, + "grad_norm": 4.6509900441505545, + "learning_rate": 1.6951101681810077e-06, + "loss": 0.4786, + "step": 11505 + }, + { + "epoch": 0.4747874886523067, + "grad_norm": 6.044242973012369, + "learning_rate": 1.6949113936054077e-06, + "loss": 0.5305, + "step": 11506 + }, + { + "epoch": 0.4748287529916646, + "grad_norm": 5.732330778956035, + "learning_rate": 1.6947126155482022e-06, + "loss": 0.4794, + "step": 11507 + }, + { + "epoch": 0.4748700173310225, + "grad_norm": 2.6260601984724983, + "learning_rate": 1.6945138340129427e-06, + "loss": 0.458, + "step": 11508 + }, + { + "epoch": 0.47491128167038044, + "grad_norm": 5.774172879132466, + "learning_rate": 1.6943150490031792e-06, + "loss": 0.5109, + "step": 11509 + }, + { + "epoch": 0.4749525460097384, + "grad_norm": 2.1369324488345725, + "learning_rate": 1.6941162605224632e-06, + "loss": 0.54, + "step": 11510 + }, + { + "epoch": 0.4749938103490963, + "grad_norm": 5.070621627370412, + "learning_rate": 1.6939174685743447e-06, + "loss": 0.5146, + "step": 11511 + }, + { + "epoch": 0.47503507468845424, + "grad_norm": 3.2649234989529075, + "learning_rate": 1.6937186731623756e-06, + "loss": 0.5662, + "step": 11512 + }, + { + "epoch": 0.47507633902781216, + "grad_norm": 18.405982649539183, + "learning_rate": 1.6935198742901063e-06, + "loss": 0.5444, + "step": 11513 + }, + { + "epoch": 0.47511760336717007, + "grad_norm": 2.458356150381122, + "learning_rate": 1.693321071961088e-06, + "loss": 0.5803, + "step": 11514 + }, + { + "epoch": 0.47515886770652804, + "grad_norm": 2.3039933177998315, + "learning_rate": 1.6931222661788714e-06, + "loss": 0.5095, + "step": 11515 + }, + { + "epoch": 0.47520013204588596, + "grad_norm": 4.009133574271387, + "learning_rate": 1.6929234569470081e-06, + "loss": 0.4936, + "step": 11516 + }, + { + "epoch": 0.4752413963852439, + "grad_norm": 4.527148561928596, + "learning_rate": 1.6927246442690494e-06, + "loss": 0.5203, + "step": 11517 + }, + { + "epoch": 0.4752826607246018, + "grad_norm": 3.7696649546590097, + "learning_rate": 1.692525828148546e-06, + "loss": 0.5861, + "step": 11518 + }, + { + "epoch": 0.4753239250639597, + "grad_norm": 5.177692812456853, + "learning_rate": 1.6923270085890504e-06, + "loss": 0.5404, + "step": 11519 + }, + { + "epoch": 0.4753651894033177, + "grad_norm": 5.1239861340985415, + "learning_rate": 1.6921281855941124e-06, + "loss": 0.5346, + "step": 11520 + }, + { + "epoch": 0.4754064537426756, + "grad_norm": 3.231554046515027, + "learning_rate": 1.6919293591672851e-06, + "loss": 0.5012, + "step": 11521 + }, + { + "epoch": 0.4754477180820335, + "grad_norm": 15.05901331674591, + "learning_rate": 1.6917305293121193e-06, + "loss": 0.5264, + "step": 11522 + }, + { + "epoch": 0.4754889824213914, + "grad_norm": 1.7704021491542579, + "learning_rate": 1.691531696032166e-06, + "loss": 0.4815, + "step": 11523 + }, + { + "epoch": 0.47553024676074934, + "grad_norm": 3.306309899406117, + "learning_rate": 1.6913328593309784e-06, + "loss": 0.4854, + "step": 11524 + }, + { + "epoch": 0.4755715111001073, + "grad_norm": 2.4881613418678916, + "learning_rate": 
1.691134019212107e-06, + "loss": 0.5227, + "step": 11525 + }, + { + "epoch": 0.47561277543946523, + "grad_norm": 3.0059756303716045, + "learning_rate": 1.6909351756791032e-06, + "loss": 0.5577, + "step": 11526 + }, + { + "epoch": 0.47565403977882315, + "grad_norm": 3.122883314869388, + "learning_rate": 1.6907363287355202e-06, + "loss": 0.537, + "step": 11527 + }, + { + "epoch": 0.47569530411818106, + "grad_norm": 4.123002230373241, + "learning_rate": 1.6905374783849087e-06, + "loss": 0.5355, + "step": 11528 + }, + { + "epoch": 0.475736568457539, + "grad_norm": 4.129786439216994, + "learning_rate": 1.6903386246308215e-06, + "loss": 0.5545, + "step": 11529 + }, + { + "epoch": 0.47577783279689695, + "grad_norm": 4.706033942560637, + "learning_rate": 1.69013976747681e-06, + "loss": 0.5259, + "step": 11530 + }, + { + "epoch": 0.47581909713625486, + "grad_norm": 2.887003199611663, + "learning_rate": 1.689940906926427e-06, + "loss": 0.5124, + "step": 11531 + }, + { + "epoch": 0.4758603614756128, + "grad_norm": 6.26156959579274, + "learning_rate": 1.6897420429832243e-06, + "loss": 0.464, + "step": 11532 + }, + { + "epoch": 0.4759016258149707, + "grad_norm": 3.0246385691056403, + "learning_rate": 1.6895431756507541e-06, + "loss": 0.5657, + "step": 11533 + }, + { + "epoch": 0.4759428901543286, + "grad_norm": 1.8119684320134604, + "learning_rate": 1.6893443049325683e-06, + "loss": 0.4937, + "step": 11534 + }, + { + "epoch": 0.47598415449368653, + "grad_norm": 2.409167623836112, + "learning_rate": 1.6891454308322193e-06, + "loss": 0.4888, + "step": 11535 + }, + { + "epoch": 0.4760254188330445, + "grad_norm": 4.378574503041274, + "learning_rate": 1.6889465533532602e-06, + "loss": 0.4814, + "step": 11536 + }, + { + "epoch": 0.4760666831724024, + "grad_norm": 2.896001746418189, + "learning_rate": 1.6887476724992429e-06, + "loss": 0.6001, + "step": 11537 + }, + { + "epoch": 0.47610794751176033, + "grad_norm": 6.271496327552867, + "learning_rate": 1.68854878827372e-06, + "loss": 0.5162, + "step": 11538 + }, + { + "epoch": 0.47614921185111825, + "grad_norm": 3.3672313736070874, + "learning_rate": 1.6883499006802443e-06, + "loss": 0.528, + "step": 11539 + }, + { + "epoch": 0.47619047619047616, + "grad_norm": 7.363992578175201, + "learning_rate": 1.6881510097223677e-06, + "loss": 0.4873, + "step": 11540 + }, + { + "epoch": 0.47623174052983414, + "grad_norm": 3.093730008387272, + "learning_rate": 1.6879521154036436e-06, + "loss": 0.5529, + "step": 11541 + }, + { + "epoch": 0.47627300486919205, + "grad_norm": 14.359409548145399, + "learning_rate": 1.6877532177276244e-06, + "loss": 0.5281, + "step": 11542 + }, + { + "epoch": 0.47631426920854997, + "grad_norm": 2.5501851012397108, + "learning_rate": 1.6875543166978633e-06, + "loss": 0.5273, + "step": 11543 + }, + { + "epoch": 0.4763555335479079, + "grad_norm": 3.030263484518884, + "learning_rate": 1.6873554123179126e-06, + "loss": 0.5115, + "step": 11544 + }, + { + "epoch": 0.4763967978872658, + "grad_norm": 3.323737001338491, + "learning_rate": 1.6871565045913259e-06, + "loss": 0.5288, + "step": 11545 + }, + { + "epoch": 0.47643806222662377, + "grad_norm": 2.882255036798056, + "learning_rate": 1.6869575935216555e-06, + "loss": 0.5476, + "step": 11546 + }, + { + "epoch": 0.4764793265659817, + "grad_norm": 9.719814916034991, + "learning_rate": 1.686758679112455e-06, + "loss": 0.5031, + "step": 11547 + }, + { + "epoch": 0.4765205909053396, + "grad_norm": 2.371974037459946, + "learning_rate": 1.6865597613672772e-06, + "loss": 0.4893, + "step": 11548 + }, + { + 
"epoch": 0.4765618552446975, + "grad_norm": 50.31432988721583, + "learning_rate": 1.6863608402896754e-06, + "loss": 0.4846, + "step": 11549 + }, + { + "epoch": 0.47660311958405543, + "grad_norm": 4.5858338025889385, + "learning_rate": 1.6861619158832026e-06, + "loss": 0.5153, + "step": 11550 + }, + { + "epoch": 0.4766443839234134, + "grad_norm": 2.1311397711848756, + "learning_rate": 1.6859629881514127e-06, + "loss": 0.5039, + "step": 11551 + }, + { + "epoch": 0.4766856482627713, + "grad_norm": 2.6967273430081504, + "learning_rate": 1.6857640570978583e-06, + "loss": 0.5302, + "step": 11552 + }, + { + "epoch": 0.47672691260212924, + "grad_norm": 17.146538282556424, + "learning_rate": 1.6855651227260937e-06, + "loss": 0.5431, + "step": 11553 + }, + { + "epoch": 0.47676817694148715, + "grad_norm": 2.3302996425709446, + "learning_rate": 1.6853661850396708e-06, + "loss": 0.5579, + "step": 11554 + }, + { + "epoch": 0.47680944128084507, + "grad_norm": 13.714508814947562, + "learning_rate": 1.685167244042145e-06, + "loss": 0.5544, + "step": 11555 + }, + { + "epoch": 0.47685070562020304, + "grad_norm": 2.9380475751071327, + "learning_rate": 1.6849682997370684e-06, + "loss": 0.5475, + "step": 11556 + }, + { + "epoch": 0.47689196995956096, + "grad_norm": 2.3763253681336574, + "learning_rate": 1.6847693521279955e-06, + "loss": 0.4587, + "step": 11557 + }, + { + "epoch": 0.4769332342989189, + "grad_norm": 3.867669190552118, + "learning_rate": 1.68457040121848e-06, + "loss": 0.5435, + "step": 11558 + }, + { + "epoch": 0.4769744986382768, + "grad_norm": 2.475864161664133, + "learning_rate": 1.6843714470120747e-06, + "loss": 0.497, + "step": 11559 + }, + { + "epoch": 0.4770157629776347, + "grad_norm": 3.960380312656281, + "learning_rate": 1.6841724895123348e-06, + "loss": 0.5629, + "step": 11560 + }, + { + "epoch": 0.4770570273169927, + "grad_norm": 2.2887282359536085, + "learning_rate": 1.6839735287228132e-06, + "loss": 0.5179, + "step": 11561 + }, + { + "epoch": 0.4770982916563506, + "grad_norm": 4.698071228003008, + "learning_rate": 1.6837745646470643e-06, + "loss": 0.5318, + "step": 11562 + }, + { + "epoch": 0.4771395559957085, + "grad_norm": 6.117037344160858, + "learning_rate": 1.6835755972886419e-06, + "loss": 0.545, + "step": 11563 + }, + { + "epoch": 0.4771808203350664, + "grad_norm": 2.8212803992666626, + "learning_rate": 1.6833766266510998e-06, + "loss": 0.5882, + "step": 11564 + }, + { + "epoch": 0.47722208467442434, + "grad_norm": 2.4097962610363823, + "learning_rate": 1.683177652737993e-06, + "loss": 0.5273, + "step": 11565 + }, + { + "epoch": 0.4772633490137823, + "grad_norm": 3.309734092239265, + "learning_rate": 1.6829786755528748e-06, + "loss": 0.5276, + "step": 11566 + }, + { + "epoch": 0.47730461335314023, + "grad_norm": 9.63647592796972, + "learning_rate": 1.6827796950992993e-06, + "loss": 0.5568, + "step": 11567 + }, + { + "epoch": 0.47734587769249814, + "grad_norm": 3.6407505682523866, + "learning_rate": 1.6825807113808218e-06, + "loss": 0.5631, + "step": 11568 + }, + { + "epoch": 0.47738714203185606, + "grad_norm": 2.701870056076651, + "learning_rate": 1.6823817244009955e-06, + "loss": 0.5308, + "step": 11569 + }, + { + "epoch": 0.477428406371214, + "grad_norm": 4.052628006718057, + "learning_rate": 1.682182734163376e-06, + "loss": 0.5061, + "step": 11570 + }, + { + "epoch": 0.47746967071057195, + "grad_norm": 2.8479344686098824, + "learning_rate": 1.6819837406715168e-06, + "loss": 0.5365, + "step": 11571 + }, + { + "epoch": 0.47751093504992986, + "grad_norm": 15.109144533076366, + 
"learning_rate": 1.6817847439289728e-06, + "loss": 0.5649, + "step": 11572 + }, + { + "epoch": 0.4775521993892878, + "grad_norm": 2.962323872940127, + "learning_rate": 1.681585743939298e-06, + "loss": 0.5364, + "step": 11573 + }, + { + "epoch": 0.4775934637286457, + "grad_norm": 5.97587352426726, + "learning_rate": 1.6813867407060482e-06, + "loss": 0.5338, + "step": 11574 + }, + { + "epoch": 0.4776347280680036, + "grad_norm": 4.558543808832741, + "learning_rate": 1.6811877342327772e-06, + "loss": 0.5782, + "step": 11575 + }, + { + "epoch": 0.4776759924073616, + "grad_norm": 2.280033447619447, + "learning_rate": 1.68098872452304e-06, + "loss": 0.5584, + "step": 11576 + }, + { + "epoch": 0.4777172567467195, + "grad_norm": 3.2455850515939164, + "learning_rate": 1.6807897115803915e-06, + "loss": 0.4614, + "step": 11577 + }, + { + "epoch": 0.4777585210860774, + "grad_norm": 2.561399660188207, + "learning_rate": 1.6805906954083868e-06, + "loss": 0.4911, + "step": 11578 + }, + { + "epoch": 0.47779978542543533, + "grad_norm": 4.131909178616064, + "learning_rate": 1.68039167601058e-06, + "loss": 0.5022, + "step": 11579 + }, + { + "epoch": 0.47784104976479325, + "grad_norm": 6.316266842435776, + "learning_rate": 1.6801926533905268e-06, + "loss": 0.5191, + "step": 11580 + }, + { + "epoch": 0.4778823141041512, + "grad_norm": 5.398598324453377, + "learning_rate": 1.6799936275517817e-06, + "loss": 0.5343, + "step": 11581 + }, + { + "epoch": 0.47792357844350913, + "grad_norm": 2.247716302795338, + "learning_rate": 1.6797945984979007e-06, + "loss": 0.5347, + "step": 11582 + }, + { + "epoch": 0.47796484278286705, + "grad_norm": 9.964692076697439, + "learning_rate": 1.679595566232438e-06, + "loss": 0.5669, + "step": 11583 + }, + { + "epoch": 0.47800610712222497, + "grad_norm": 10.583767301027708, + "learning_rate": 1.6793965307589499e-06, + "loss": 0.5023, + "step": 11584 + }, + { + "epoch": 0.4780473714615829, + "grad_norm": 3.1872012884118646, + "learning_rate": 1.6791974920809899e-06, + "loss": 0.4976, + "step": 11585 + }, + { + "epoch": 0.47808863580094085, + "grad_norm": 3.3499669121436284, + "learning_rate": 1.6789984502021152e-06, + "loss": 0.5054, + "step": 11586 + }, + { + "epoch": 0.47812990014029877, + "grad_norm": 2.760449174466834, + "learning_rate": 1.6787994051258797e-06, + "loss": 0.5446, + "step": 11587 + }, + { + "epoch": 0.4781711644796567, + "grad_norm": 4.671260234615193, + "learning_rate": 1.6786003568558403e-06, + "loss": 0.5242, + "step": 11588 + }, + { + "epoch": 0.4782124288190146, + "grad_norm": 4.337913350207718, + "learning_rate": 1.6784013053955511e-06, + "loss": 0.5519, + "step": 11589 + }, + { + "epoch": 0.4782536931583725, + "grad_norm": 6.325663368490691, + "learning_rate": 1.6782022507485685e-06, + "loss": 0.515, + "step": 11590 + }, + { + "epoch": 0.4782949574977305, + "grad_norm": 2.3181239058212784, + "learning_rate": 1.6780031929184484e-06, + "loss": 0.5413, + "step": 11591 + }, + { + "epoch": 0.4783362218370884, + "grad_norm": 2.601608245342239, + "learning_rate": 1.6778041319087455e-06, + "loss": 0.5096, + "step": 11592 + }, + { + "epoch": 0.4783774861764463, + "grad_norm": 1.8670034170216916, + "learning_rate": 1.677605067723016e-06, + "loss": 0.4752, + "step": 11593 + }, + { + "epoch": 0.47841875051580424, + "grad_norm": 2.052434751387455, + "learning_rate": 1.6774060003648158e-06, + "loss": 0.5122, + "step": 11594 + }, + { + "epoch": 0.47846001485516215, + "grad_norm": 4.348735776646083, + "learning_rate": 1.6772069298377006e-06, + "loss": 0.5438, + "step": 11595 
+ }, + { + "epoch": 0.47850127919452007, + "grad_norm": 3.9432165266120456, + "learning_rate": 1.6770078561452262e-06, + "loss": 0.5713, + "step": 11596 + }, + { + "epoch": 0.47854254353387804, + "grad_norm": 6.504019060678681, + "learning_rate": 1.6768087792909492e-06, + "loss": 0.5681, + "step": 11597 + }, + { + "epoch": 0.47858380787323596, + "grad_norm": 2.7806532917585334, + "learning_rate": 1.6766096992784243e-06, + "loss": 0.5607, + "step": 11598 + }, + { + "epoch": 0.47862507221259387, + "grad_norm": 4.676700086032874, + "learning_rate": 1.676410616111209e-06, + "loss": 0.5886, + "step": 11599 + }, + { + "epoch": 0.4786663365519518, + "grad_norm": 3.7256049274181593, + "learning_rate": 1.6762115297928583e-06, + "loss": 0.5581, + "step": 11600 + }, + { + "epoch": 0.4787076008913097, + "grad_norm": 2.7508667291022277, + "learning_rate": 1.6760124403269292e-06, + "loss": 0.4952, + "step": 11601 + }, + { + "epoch": 0.4787488652306677, + "grad_norm": 2.180107290943602, + "learning_rate": 1.6758133477169777e-06, + "loss": 0.5169, + "step": 11602 + }, + { + "epoch": 0.4787901295700256, + "grad_norm": 2.464757183145632, + "learning_rate": 1.6756142519665597e-06, + "loss": 0.4971, + "step": 11603 + }, + { + "epoch": 0.4788313939093835, + "grad_norm": 3.7561793513729596, + "learning_rate": 1.6754151530792318e-06, + "loss": 0.5744, + "step": 11604 + }, + { + "epoch": 0.4788726582487414, + "grad_norm": 2.731493255725924, + "learning_rate": 1.6752160510585507e-06, + "loss": 0.4847, + "step": 11605 + }, + { + "epoch": 0.47891392258809934, + "grad_norm": 2.60675833530481, + "learning_rate": 1.6750169459080725e-06, + "loss": 0.5263, + "step": 11606 + }, + { + "epoch": 0.4789551869274573, + "grad_norm": 3.9505262334032487, + "learning_rate": 1.6748178376313538e-06, + "loss": 0.5491, + "step": 11607 + }, + { + "epoch": 0.4789964512668152, + "grad_norm": 2.2229331539508626, + "learning_rate": 1.6746187262319508e-06, + "loss": 0.5809, + "step": 11608 + }, + { + "epoch": 0.47903771560617314, + "grad_norm": 2.2622049274880673, + "learning_rate": 1.6744196117134214e-06, + "loss": 0.5151, + "step": 11609 + }, + { + "epoch": 0.47907897994553106, + "grad_norm": 2.1403227958359086, + "learning_rate": 1.6742204940793204e-06, + "loss": 0.5399, + "step": 11610 + }, + { + "epoch": 0.479120244284889, + "grad_norm": 44.894831008193805, + "learning_rate": 1.6740213733332068e-06, + "loss": 0.595, + "step": 11611 + }, + { + "epoch": 0.47916150862424695, + "grad_norm": 2.907837660672396, + "learning_rate": 1.6738222494786351e-06, + "loss": 0.5452, + "step": 11612 + }, + { + "epoch": 0.47920277296360486, + "grad_norm": 2.9693379282625862, + "learning_rate": 1.6736231225191636e-06, + "loss": 0.5065, + "step": 11613 + }, + { + "epoch": 0.4792440373029628, + "grad_norm": 4.2138537993897165, + "learning_rate": 1.6734239924583487e-06, + "loss": 0.4793, + "step": 11614 + }, + { + "epoch": 0.4792853016423207, + "grad_norm": 2.925540427418511, + "learning_rate": 1.6732248592997474e-06, + "loss": 0.4764, + "step": 11615 + }, + { + "epoch": 0.4793265659816786, + "grad_norm": 27.293610618193878, + "learning_rate": 1.6730257230469165e-06, + "loss": 0.5706, + "step": 11616 + }, + { + "epoch": 0.4793678303210366, + "grad_norm": 4.345683242027169, + "learning_rate": 1.6728265837034141e-06, + "loss": 0.5308, + "step": 11617 + }, + { + "epoch": 0.4794090946603945, + "grad_norm": 3.728385352554204, + "learning_rate": 1.672627441272796e-06, + "loss": 0.4996, + "step": 11618 + }, + { + "epoch": 0.4794503589997524, + "grad_norm": 
3.273318715463173, + "learning_rate": 1.6724282957586202e-06, + "loss": 0.5023, + "step": 11619 + }, + { + "epoch": 0.47949162333911033, + "grad_norm": 4.065391047991541, + "learning_rate": 1.6722291471644434e-06, + "loss": 0.5273, + "step": 11620 + }, + { + "epoch": 0.47953288767846824, + "grad_norm": 8.016314326914962, + "learning_rate": 1.6720299954938235e-06, + "loss": 0.5116, + "step": 11621 + }, + { + "epoch": 0.4795741520178262, + "grad_norm": 4.343698246265622, + "learning_rate": 1.6718308407503173e-06, + "loss": 0.5648, + "step": 11622 + }, + { + "epoch": 0.47961541635718413, + "grad_norm": 2.682477050902934, + "learning_rate": 1.6716316829374825e-06, + "loss": 0.568, + "step": 11623 + }, + { + "epoch": 0.47965668069654205, + "grad_norm": 2.9593405076550887, + "learning_rate": 1.6714325220588769e-06, + "loss": 0.5297, + "step": 11624 + }, + { + "epoch": 0.47969794503589996, + "grad_norm": 3.687170493675891, + "learning_rate": 1.671233358118057e-06, + "loss": 0.5129, + "step": 11625 + }, + { + "epoch": 0.4797392093752579, + "grad_norm": 2.1301857315982895, + "learning_rate": 1.6710341911185808e-06, + "loss": 0.4856, + "step": 11626 + }, + { + "epoch": 0.47978047371461585, + "grad_norm": 9.755470242708643, + "learning_rate": 1.6708350210640067e-06, + "loss": 0.5445, + "step": 11627 + }, + { + "epoch": 0.47982173805397377, + "grad_norm": 3.1266521677352386, + "learning_rate": 1.6706358479578914e-06, + "loss": 0.5563, + "step": 11628 + }, + { + "epoch": 0.4798630023933317, + "grad_norm": 3.58349766460654, + "learning_rate": 1.6704366718037933e-06, + "loss": 0.5029, + "step": 11629 + }, + { + "epoch": 0.4799042667326896, + "grad_norm": 3.6484918194814138, + "learning_rate": 1.6702374926052698e-06, + "loss": 0.4893, + "step": 11630 + }, + { + "epoch": 0.4799455310720475, + "grad_norm": 10.495929067572034, + "learning_rate": 1.6700383103658786e-06, + "loss": 0.5387, + "step": 11631 + }, + { + "epoch": 0.4799867954114055, + "grad_norm": 4.040659075202425, + "learning_rate": 1.6698391250891778e-06, + "loss": 0.4939, + "step": 11632 + }, + { + "epoch": 0.4800280597507634, + "grad_norm": 5.183776896186812, + "learning_rate": 1.6696399367787253e-06, + "loss": 0.5457, + "step": 11633 + }, + { + "epoch": 0.4800693240901213, + "grad_norm": 3.793655388219855, + "learning_rate": 1.6694407454380792e-06, + "loss": 0.5485, + "step": 11634 + }, + { + "epoch": 0.48011058842947923, + "grad_norm": 2.4826074649007777, + "learning_rate": 1.6692415510707979e-06, + "loss": 0.5437, + "step": 11635 + }, + { + "epoch": 0.48015185276883715, + "grad_norm": 2.5611305516391343, + "learning_rate": 1.6690423536804385e-06, + "loss": 0.5212, + "step": 11636 + }, + { + "epoch": 0.4801931171081951, + "grad_norm": 2.236682674634576, + "learning_rate": 1.6688431532705603e-06, + "loss": 0.5066, + "step": 11637 + }, + { + "epoch": 0.48023438144755304, + "grad_norm": 4.979376290635118, + "learning_rate": 1.6686439498447206e-06, + "loss": 0.5191, + "step": 11638 + }, + { + "epoch": 0.48027564578691095, + "grad_norm": 5.964271644730321, + "learning_rate": 1.6684447434064784e-06, + "loss": 0.5261, + "step": 11639 + }, + { + "epoch": 0.48031691012626887, + "grad_norm": 2.9951707926745628, + "learning_rate": 1.6682455339593915e-06, + "loss": 0.5654, + "step": 11640 + }, + { + "epoch": 0.4803581744656268, + "grad_norm": 2.055818332914646, + "learning_rate": 1.6680463215070186e-06, + "loss": 0.5076, + "step": 11641 + }, + { + "epoch": 0.48039943880498476, + "grad_norm": 3.2706564034025423, + "learning_rate": 1.6678471060529179e-06, 
+ "loss": 0.516, + "step": 11642 + }, + { + "epoch": 0.4804407031443427, + "grad_norm": 3.3324677042456528, + "learning_rate": 1.667647887600648e-06, + "loss": 0.5268, + "step": 11643 + }, + { + "epoch": 0.4804819674837006, + "grad_norm": 2.858357422013957, + "learning_rate": 1.6674486661537679e-06, + "loss": 0.5204, + "step": 11644 + }, + { + "epoch": 0.4805232318230585, + "grad_norm": 6.519419777906229, + "learning_rate": 1.667249441715835e-06, + "loss": 0.5076, + "step": 11645 + }, + { + "epoch": 0.4805644961624164, + "grad_norm": 2.753971004557036, + "learning_rate": 1.667050214290409e-06, + "loss": 0.5277, + "step": 11646 + }, + { + "epoch": 0.4806057605017744, + "grad_norm": 9.536881959380567, + "learning_rate": 1.6668509838810484e-06, + "loss": 0.5156, + "step": 11647 + }, + { + "epoch": 0.4806470248411323, + "grad_norm": 2.985406271298905, + "learning_rate": 1.6666517504913117e-06, + "loss": 0.5061, + "step": 11648 + }, + { + "epoch": 0.4806882891804902, + "grad_norm": 4.403267655245343, + "learning_rate": 1.6664525141247577e-06, + "loss": 0.4913, + "step": 11649 + }, + { + "epoch": 0.48072955351984814, + "grad_norm": 24.36935480744689, + "learning_rate": 1.666253274784946e-06, + "loss": 0.518, + "step": 11650 + }, + { + "epoch": 0.48077081785920606, + "grad_norm": 5.622759507288441, + "learning_rate": 1.6660540324754342e-06, + "loss": 0.5323, + "step": 11651 + }, + { + "epoch": 0.48081208219856403, + "grad_norm": 2.350217824101602, + "learning_rate": 1.6658547871997823e-06, + "loss": 0.5591, + "step": 11652 + }, + { + "epoch": 0.48085334653792194, + "grad_norm": 3.105078911635511, + "learning_rate": 1.665655538961549e-06, + "loss": 0.4857, + "step": 11653 + }, + { + "epoch": 0.48089461087727986, + "grad_norm": 2.9761854023013017, + "learning_rate": 1.6654562877642934e-06, + "loss": 0.519, + "step": 11654 + }, + { + "epoch": 0.4809358752166378, + "grad_norm": 2.836751511840186, + "learning_rate": 1.665257033611574e-06, + "loss": 0.5109, + "step": 11655 + }, + { + "epoch": 0.4809771395559957, + "grad_norm": 4.698902709402874, + "learning_rate": 1.6650577765069512e-06, + "loss": 0.5569, + "step": 11656 + }, + { + "epoch": 0.4810184038953536, + "grad_norm": 6.727006808738424, + "learning_rate": 1.6648585164539836e-06, + "loss": 0.5574, + "step": 11657 + }, + { + "epoch": 0.4810596682347116, + "grad_norm": 3.4470157639272574, + "learning_rate": 1.6646592534562304e-06, + "loss": 0.5409, + "step": 11658 + }, + { + "epoch": 0.4811009325740695, + "grad_norm": 2.6001726310528577, + "learning_rate": 1.6644599875172507e-06, + "loss": 0.5033, + "step": 11659 + }, + { + "epoch": 0.4811421969134274, + "grad_norm": 3.3799058350854785, + "learning_rate": 1.6642607186406048e-06, + "loss": 0.5116, + "step": 11660 + }, + { + "epoch": 0.4811834612527853, + "grad_norm": 2.4640493632779576, + "learning_rate": 1.6640614468298509e-06, + "loss": 0.5061, + "step": 11661 + }, + { + "epoch": 0.48122472559214324, + "grad_norm": 2.4510440351817873, + "learning_rate": 1.6638621720885497e-06, + "loss": 0.5456, + "step": 11662 + }, + { + "epoch": 0.4812659899315012, + "grad_norm": 4.8767662323600085, + "learning_rate": 1.6636628944202602e-06, + "loss": 0.4983, + "step": 11663 + }, + { + "epoch": 0.48130725427085913, + "grad_norm": 3.6827525907270133, + "learning_rate": 1.6634636138285419e-06, + "loss": 0.5374, + "step": 11664 + }, + { + "epoch": 0.48134851861021705, + "grad_norm": 4.7600333051409525, + "learning_rate": 1.663264330316954e-06, + "loss": 0.5167, + "step": 11665 + }, + { + "epoch": 
0.48138978294957496, + "grad_norm": 7.773832560499841, + "learning_rate": 1.6630650438890572e-06, + "loss": 0.5787, + "step": 11666 + }, + { + "epoch": 0.4814310472889329, + "grad_norm": 12.92074149084274, + "learning_rate": 1.6628657545484107e-06, + "loss": 0.554, + "step": 11667 + }, + { + "epoch": 0.48147231162829085, + "grad_norm": 3.396965598768704, + "learning_rate": 1.6626664622985741e-06, + "loss": 0.5582, + "step": 11668 + }, + { + "epoch": 0.48151357596764877, + "grad_norm": 5.432261987582635, + "learning_rate": 1.6624671671431079e-06, + "loss": 0.5852, + "step": 11669 + }, + { + "epoch": 0.4815548403070067, + "grad_norm": 2.508424943349817, + "learning_rate": 1.6622678690855721e-06, + "loss": 0.5388, + "step": 11670 + }, + { + "epoch": 0.4815961046463646, + "grad_norm": 2.2800090540245135, + "learning_rate": 1.6620685681295254e-06, + "loss": 0.5472, + "step": 11671 + }, + { + "epoch": 0.4816373689857225, + "grad_norm": 3.0561179268074707, + "learning_rate": 1.661869264278529e-06, + "loss": 0.512, + "step": 11672 + }, + { + "epoch": 0.4816786333250805, + "grad_norm": 5.573671295715206, + "learning_rate": 1.6616699575361425e-06, + "loss": 0.486, + "step": 11673 + }, + { + "epoch": 0.4817198976644384, + "grad_norm": 3.7889127118645294, + "learning_rate": 1.6614706479059263e-06, + "loss": 0.5202, + "step": 11674 + }, + { + "epoch": 0.4817611620037963, + "grad_norm": 2.614095370508474, + "learning_rate": 1.6612713353914402e-06, + "loss": 0.4944, + "step": 11675 + }, + { + "epoch": 0.48180242634315423, + "grad_norm": 2.6644273922210138, + "learning_rate": 1.6610720199962447e-06, + "loss": 0.5563, + "step": 11676 + }, + { + "epoch": 0.48184369068251215, + "grad_norm": 2.5524985098001443, + "learning_rate": 1.6608727017239e-06, + "loss": 0.481, + "step": 11677 + }, + { + "epoch": 0.4818849550218701, + "grad_norm": 3.9969510470418563, + "learning_rate": 1.6606733805779664e-06, + "loss": 0.5203, + "step": 11678 + }, + { + "epoch": 0.48192621936122804, + "grad_norm": 3.6164183256666584, + "learning_rate": 1.6604740565620042e-06, + "loss": 0.5426, + "step": 11679 + }, + { + "epoch": 0.48196748370058595, + "grad_norm": 2.415870307172569, + "learning_rate": 1.6602747296795742e-06, + "loss": 0.5196, + "step": 11680 + }, + { + "epoch": 0.48200874803994387, + "grad_norm": 5.987456400535086, + "learning_rate": 1.660075399934236e-06, + "loss": 0.4804, + "step": 11681 + }, + { + "epoch": 0.4820500123793018, + "grad_norm": 3.438284611792041, + "learning_rate": 1.6598760673295514e-06, + "loss": 0.5319, + "step": 11682 + }, + { + "epoch": 0.48209127671865976, + "grad_norm": 5.240375801485401, + "learning_rate": 1.65967673186908e-06, + "loss": 0.5455, + "step": 11683 + }, + { + "epoch": 0.48213254105801767, + "grad_norm": 3.8890056332024985, + "learning_rate": 1.6594773935563825e-06, + "loss": 0.5315, + "step": 11684 + }, + { + "epoch": 0.4821738053973756, + "grad_norm": 2.3796103683382968, + "learning_rate": 1.6592780523950201e-06, + "loss": 0.4823, + "step": 11685 + }, + { + "epoch": 0.4822150697367335, + "grad_norm": 57.092261160638415, + "learning_rate": 1.6590787083885528e-06, + "loss": 0.55, + "step": 11686 + }, + { + "epoch": 0.4822563340760914, + "grad_norm": 3.2928767750095638, + "learning_rate": 1.658879361540542e-06, + "loss": 0.5048, + "step": 11687 + }, + { + "epoch": 0.4822975984154494, + "grad_norm": 2.2767167890592495, + "learning_rate": 1.6586800118545484e-06, + "loss": 0.4966, + "step": 11688 + }, + { + "epoch": 0.4823388627548073, + "grad_norm": 3.3533555372614225, + "learning_rate": 
1.6584806593341334e-06, + "loss": 0.5313, + "step": 11689 + }, + { + "epoch": 0.4823801270941652, + "grad_norm": 5.298095457519005, + "learning_rate": 1.658281303982857e-06, + "loss": 0.5265, + "step": 11690 + }, + { + "epoch": 0.48242139143352314, + "grad_norm": 7.785545760639429, + "learning_rate": 1.6580819458042801e-06, + "loss": 0.5403, + "step": 11691 + }, + { + "epoch": 0.48246265577288105, + "grad_norm": 2.660617135756616, + "learning_rate": 1.6578825848019644e-06, + "loss": 0.5335, + "step": 11692 + }, + { + "epoch": 0.482503920112239, + "grad_norm": 10.21699104274378, + "learning_rate": 1.657683220979471e-06, + "loss": 0.5224, + "step": 11693 + }, + { + "epoch": 0.48254518445159694, + "grad_norm": 3.1219821694923535, + "learning_rate": 1.6574838543403604e-06, + "loss": 0.5135, + "step": 11694 + }, + { + "epoch": 0.48258644879095486, + "grad_norm": 2.0378272203016308, + "learning_rate": 1.6572844848881948e-06, + "loss": 0.4872, + "step": 11695 + }, + { + "epoch": 0.4826277131303128, + "grad_norm": 4.433813186517334, + "learning_rate": 1.6570851126265344e-06, + "loss": 0.5593, + "step": 11696 + }, + { + "epoch": 0.4826689774696707, + "grad_norm": 16.001219026887508, + "learning_rate": 1.6568857375589412e-06, + "loss": 0.5313, + "step": 11697 + }, + { + "epoch": 0.48271024180902866, + "grad_norm": 4.335255949820386, + "learning_rate": 1.656686359688976e-06, + "loss": 0.5477, + "step": 11698 + }, + { + "epoch": 0.4827515061483866, + "grad_norm": 8.016940710195785, + "learning_rate": 1.6564869790202007e-06, + "loss": 0.5709, + "step": 11699 + }, + { + "epoch": 0.4827927704877445, + "grad_norm": 5.9561319711878165, + "learning_rate": 1.656287595556176e-06, + "loss": 0.5539, + "step": 11700 + }, + { + "epoch": 0.4828340348271024, + "grad_norm": 2.4042687327913512, + "learning_rate": 1.6560882093004644e-06, + "loss": 0.6122, + "step": 11701 + }, + { + "epoch": 0.4828752991664603, + "grad_norm": 6.814574081604852, + "learning_rate": 1.6558888202566264e-06, + "loss": 0.5465, + "step": 11702 + }, + { + "epoch": 0.4829165635058183, + "grad_norm": 3.41110190064188, + "learning_rate": 1.655689428428225e-06, + "loss": 0.5657, + "step": 11703 + }, + { + "epoch": 0.4829578278451762, + "grad_norm": 4.4621348947751756, + "learning_rate": 1.65549003381882e-06, + "loss": 0.5007, + "step": 11704 + }, + { + "epoch": 0.48299909218453413, + "grad_norm": 3.279025862478645, + "learning_rate": 1.6552906364319747e-06, + "loss": 0.5707, + "step": 11705 + }, + { + "epoch": 0.48304035652389204, + "grad_norm": 2.71905475757414, + "learning_rate": 1.6550912362712496e-06, + "loss": 0.5623, + "step": 11706 + }, + { + "epoch": 0.48308162086324996, + "grad_norm": 2.265898574860322, + "learning_rate": 1.6548918333402071e-06, + "loss": 0.5844, + "step": 11707 + }, + { + "epoch": 0.48312288520260793, + "grad_norm": 2.997409237060023, + "learning_rate": 1.654692427642409e-06, + "loss": 0.5366, + "step": 11708 + }, + { + "epoch": 0.48316414954196585, + "grad_norm": 2.762073470291805, + "learning_rate": 1.654493019181418e-06, + "loss": 0.5362, + "step": 11709 + }, + { + "epoch": 0.48320541388132376, + "grad_norm": 2.77113880040986, + "learning_rate": 1.6542936079607941e-06, + "loss": 0.4957, + "step": 11710 + }, + { + "epoch": 0.4832466782206817, + "grad_norm": 13.389611178283015, + "learning_rate": 1.654094193984101e-06, + "loss": 0.5315, + "step": 11711 + }, + { + "epoch": 0.4832879425600396, + "grad_norm": 4.660422264935028, + "learning_rate": 1.6538947772548997e-06, + "loss": 0.5025, + "step": 11712 + }, + { + "epoch": 
0.48332920689939757, + "grad_norm": 2.8140932632324374, + "learning_rate": 1.6536953577767529e-06, + "loss": 0.576, + "step": 11713 + }, + { + "epoch": 0.4833704712387555, + "grad_norm": 4.314453048319624, + "learning_rate": 1.6534959355532223e-06, + "loss": 0.5393, + "step": 11714 + }, + { + "epoch": 0.4834117355781134, + "grad_norm": 5.002791961767645, + "learning_rate": 1.6532965105878705e-06, + "loss": 0.5129, + "step": 11715 + }, + { + "epoch": 0.4834529999174713, + "grad_norm": 4.813865848331638, + "learning_rate": 1.6530970828842595e-06, + "loss": 0.5549, + "step": 11716 + }, + { + "epoch": 0.48349426425682923, + "grad_norm": 2.166183135582634, + "learning_rate": 1.6528976524459517e-06, + "loss": 0.4742, + "step": 11717 + }, + { + "epoch": 0.48353552859618715, + "grad_norm": 1.832179783380839, + "learning_rate": 1.6526982192765093e-06, + "loss": 0.4888, + "step": 11718 + }, + { + "epoch": 0.4835767929355451, + "grad_norm": 6.935436392521494, + "learning_rate": 1.6524987833794949e-06, + "loss": 0.5725, + "step": 11719 + }, + { + "epoch": 0.48361805727490303, + "grad_norm": 6.530787481696636, + "learning_rate": 1.6522993447584701e-06, + "loss": 0.5412, + "step": 11720 + }, + { + "epoch": 0.48365932161426095, + "grad_norm": 3.030557392170107, + "learning_rate": 1.6520999034169988e-06, + "loss": 0.5436, + "step": 11721 + }, + { + "epoch": 0.48370058595361887, + "grad_norm": 16.933417465421943, + "learning_rate": 1.6519004593586425e-06, + "loss": 0.5064, + "step": 11722 + }, + { + "epoch": 0.4837418502929768, + "grad_norm": 2.035608110670306, + "learning_rate": 1.6517010125869636e-06, + "loss": 0.5226, + "step": 11723 + }, + { + "epoch": 0.48378311463233475, + "grad_norm": 2.1661863206727565, + "learning_rate": 1.6515015631055253e-06, + "loss": 0.5787, + "step": 11724 + }, + { + "epoch": 0.48382437897169267, + "grad_norm": 7.0568101780198695, + "learning_rate": 1.6513021109178902e-06, + "loss": 0.4972, + "step": 11725 + }, + { + "epoch": 0.4838656433110506, + "grad_norm": 2.4540981034328837, + "learning_rate": 1.651102656027621e-06, + "loss": 0.4971, + "step": 11726 + }, + { + "epoch": 0.4839069076504085, + "grad_norm": 5.240433294512772, + "learning_rate": 1.65090319843828e-06, + "loss": 0.5235, + "step": 11727 + }, + { + "epoch": 0.4839481719897664, + "grad_norm": 2.000323537869883, + "learning_rate": 1.6507037381534306e-06, + "loss": 0.4994, + "step": 11728 + }, + { + "epoch": 0.4839894363291244, + "grad_norm": 6.661216302261018, + "learning_rate": 1.6505042751766354e-06, + "loss": 0.5572, + "step": 11729 + }, + { + "epoch": 0.4840307006684823, + "grad_norm": 6.469552515926322, + "learning_rate": 1.6503048095114576e-06, + "loss": 0.5837, + "step": 11730 + }, + { + "epoch": 0.4840719650078402, + "grad_norm": 2.3641530309137804, + "learning_rate": 1.6501053411614592e-06, + "loss": 0.4738, + "step": 11731 + }, + { + "epoch": 0.48411322934719814, + "grad_norm": 8.411208087999412, + "learning_rate": 1.6499058701302048e-06, + "loss": 0.4986, + "step": 11732 + }, + { + "epoch": 0.48415449368655605, + "grad_norm": 3.9481820078788346, + "learning_rate": 1.649706396421256e-06, + "loss": 0.5553, + "step": 11733 + }, + { + "epoch": 0.484195758025914, + "grad_norm": 4.0963549768410665, + "learning_rate": 1.6495069200381768e-06, + "loss": 0.5285, + "step": 11734 + }, + { + "epoch": 0.48423702236527194, + "grad_norm": 2.6971118966429612, + "learning_rate": 1.6493074409845294e-06, + "loss": 0.5085, + "step": 11735 + }, + { + "epoch": 0.48427828670462986, + "grad_norm": 3.5976198119507288, + 
"learning_rate": 1.6491079592638783e-06, + "loss": 0.4997, + "step": 11736 + }, + { + "epoch": 0.4843195510439878, + "grad_norm": 3.8742698709584027, + "learning_rate": 1.6489084748797856e-06, + "loss": 0.5009, + "step": 11737 + }, + { + "epoch": 0.4843608153833457, + "grad_norm": 3.473413289721264, + "learning_rate": 1.6487089878358153e-06, + "loss": 0.4656, + "step": 11738 + }, + { + "epoch": 0.48440207972270366, + "grad_norm": 3.7297716802448524, + "learning_rate": 1.64850949813553e-06, + "loss": 0.5298, + "step": 11739 + }, + { + "epoch": 0.4844433440620616, + "grad_norm": 2.3238096998995412, + "learning_rate": 1.6483100057824938e-06, + "loss": 0.5512, + "step": 11740 + }, + { + "epoch": 0.4844846084014195, + "grad_norm": 4.716746795499038, + "learning_rate": 1.6481105107802698e-06, + "loss": 0.5694, + "step": 11741 + }, + { + "epoch": 0.4845258727407774, + "grad_norm": 8.32797123518778, + "learning_rate": 1.6479110131324221e-06, + "loss": 0.5497, + "step": 11742 + }, + { + "epoch": 0.4845671370801353, + "grad_norm": 2.155428207909349, + "learning_rate": 1.6477115128425131e-06, + "loss": 0.5182, + "step": 11743 + }, + { + "epoch": 0.4846084014194933, + "grad_norm": 6.751564439428285, + "learning_rate": 1.6475120099141073e-06, + "loss": 0.5442, + "step": 11744 + }, + { + "epoch": 0.4846496657588512, + "grad_norm": 4.547963739150554, + "learning_rate": 1.6473125043507677e-06, + "loss": 0.4972, + "step": 11745 + }, + { + "epoch": 0.4846909300982091, + "grad_norm": 2.6025048730065374, + "learning_rate": 1.6471129961560584e-06, + "loss": 0.5529, + "step": 11746 + }, + { + "epoch": 0.48473219443756704, + "grad_norm": 2.6846669034072135, + "learning_rate": 1.6469134853335427e-06, + "loss": 0.535, + "step": 11747 + }, + { + "epoch": 0.48477345877692496, + "grad_norm": 3.5614042591001422, + "learning_rate": 1.646713971886785e-06, + "loss": 0.5483, + "step": 11748 + }, + { + "epoch": 0.48481472311628293, + "grad_norm": 2.2227465243661193, + "learning_rate": 1.6465144558193488e-06, + "loss": 0.5246, + "step": 11749 + }, + { + "epoch": 0.48485598745564085, + "grad_norm": 9.42550173595919, + "learning_rate": 1.6463149371347976e-06, + "loss": 0.5434, + "step": 11750 + }, + { + "epoch": 0.48489725179499876, + "grad_norm": 4.488743293194355, + "learning_rate": 1.6461154158366959e-06, + "loss": 0.5385, + "step": 11751 + }, + { + "epoch": 0.4849385161343567, + "grad_norm": 17.38886750880626, + "learning_rate": 1.645915891928607e-06, + "loss": 0.5258, + "step": 11752 + }, + { + "epoch": 0.4849797804737146, + "grad_norm": 9.466913585132113, + "learning_rate": 1.6457163654140953e-06, + "loss": 0.4561, + "step": 11753 + }, + { + "epoch": 0.48502104481307257, + "grad_norm": 3.3785925003389154, + "learning_rate": 1.6455168362967248e-06, + "loss": 0.567, + "step": 11754 + }, + { + "epoch": 0.4850623091524305, + "grad_norm": 2.51892134108925, + "learning_rate": 1.6453173045800598e-06, + "loss": 0.4664, + "step": 11755 + }, + { + "epoch": 0.4851035734917884, + "grad_norm": 5.229602238470511, + "learning_rate": 1.6451177702676641e-06, + "loss": 0.5647, + "step": 11756 + }, + { + "epoch": 0.4851448378311463, + "grad_norm": 3.63816864042348, + "learning_rate": 1.6449182333631022e-06, + "loss": 0.5658, + "step": 11757 + }, + { + "epoch": 0.48518610217050423, + "grad_norm": 20.589512358150508, + "learning_rate": 1.6447186938699378e-06, + "loss": 0.6054, + "step": 11758 + }, + { + "epoch": 0.4852273665098622, + "grad_norm": 4.2393017649385065, + "learning_rate": 1.6445191517917353e-06, + "loss": 0.5711, + "step": 
11759 + }, + { + "epoch": 0.4852686308492201, + "grad_norm": 2.943490835533672, + "learning_rate": 1.6443196071320598e-06, + "loss": 0.5592, + "step": 11760 + }, + { + "epoch": 0.48530989518857803, + "grad_norm": 2.0998133348551495, + "learning_rate": 1.6441200598944743e-06, + "loss": 0.5448, + "step": 11761 + }, + { + "epoch": 0.48535115952793595, + "grad_norm": 2.7592175446047293, + "learning_rate": 1.6439205100825448e-06, + "loss": 0.5565, + "step": 11762 + }, + { + "epoch": 0.48539242386729387, + "grad_norm": 5.691079836586029, + "learning_rate": 1.6437209576998349e-06, + "loss": 0.5553, + "step": 11763 + }, + { + "epoch": 0.48543368820665184, + "grad_norm": 7.446983320864315, + "learning_rate": 1.6435214027499085e-06, + "loss": 0.5514, + "step": 11764 + }, + { + "epoch": 0.48547495254600975, + "grad_norm": 10.711788762068368, + "learning_rate": 1.6433218452363315e-06, + "loss": 0.5381, + "step": 11765 + }, + { + "epoch": 0.48551621688536767, + "grad_norm": 2.710435982285795, + "learning_rate": 1.6431222851626672e-06, + "loss": 0.5375, + "step": 11766 + }, + { + "epoch": 0.4855574812247256, + "grad_norm": 2.3772115147519544, + "learning_rate": 1.6429227225324811e-06, + "loss": 0.5042, + "step": 11767 + }, + { + "epoch": 0.4855987455640835, + "grad_norm": 3.5626155286881955, + "learning_rate": 1.642723157349338e-06, + "loss": 0.5134, + "step": 11768 + }, + { + "epoch": 0.48564000990344147, + "grad_norm": 3.054081521613171, + "learning_rate": 1.6425235896168023e-06, + "loss": 0.5463, + "step": 11769 + }, + { + "epoch": 0.4856812742427994, + "grad_norm": 2.5038807955227327, + "learning_rate": 1.6423240193384379e-06, + "loss": 0.5815, + "step": 11770 + }, + { + "epoch": 0.4857225385821573, + "grad_norm": 72.8075852987682, + "learning_rate": 1.6421244465178112e-06, + "loss": 0.5093, + "step": 11771 + }, + { + "epoch": 0.4857638029215152, + "grad_norm": 2.405631593632576, + "learning_rate": 1.641924871158486e-06, + "loss": 0.5248, + "step": 11772 + }, + { + "epoch": 0.48580506726087314, + "grad_norm": 5.525604408794335, + "learning_rate": 1.641725293264028e-06, + "loss": 0.5536, + "step": 11773 + }, + { + "epoch": 0.4858463316002311, + "grad_norm": 2.473497473542455, + "learning_rate": 1.6415257128380012e-06, + "loss": 0.5563, + "step": 11774 + }, + { + "epoch": 0.485887595939589, + "grad_norm": 3.1008820195873255, + "learning_rate": 1.6413261298839717e-06, + "loss": 0.5456, + "step": 11775 + }, + { + "epoch": 0.48592886027894694, + "grad_norm": 3.40029116440308, + "learning_rate": 1.6411265444055035e-06, + "loss": 0.5486, + "step": 11776 + }, + { + "epoch": 0.48597012461830486, + "grad_norm": 3.31505996999202, + "learning_rate": 1.6409269564061624e-06, + "loss": 0.5374, + "step": 11777 + }, + { + "epoch": 0.48601138895766277, + "grad_norm": 2.150373646130545, + "learning_rate": 1.640727365889513e-06, + "loss": 0.5635, + "step": 11778 + }, + { + "epoch": 0.4860526532970207, + "grad_norm": 9.51280219108989, + "learning_rate": 1.6405277728591211e-06, + "loss": 0.5609, + "step": 11779 + }, + { + "epoch": 0.48609391763637866, + "grad_norm": 12.128973000085772, + "learning_rate": 1.6403281773185513e-06, + "loss": 0.5794, + "step": 11780 + }, + { + "epoch": 0.4861351819757366, + "grad_norm": 2.264268280614986, + "learning_rate": 1.64012857927137e-06, + "loss": 0.5209, + "step": 11781 + }, + { + "epoch": 0.4861764463150945, + "grad_norm": 6.102802204332055, + "learning_rate": 1.639928978721141e-06, + "loss": 0.5909, + "step": 11782 + }, + { + "epoch": 0.4862177106544524, + "grad_norm": 
2.825312764809096, + "learning_rate": 1.6397293756714306e-06, + "loss": 0.5408, + "step": 11783 + }, + { + "epoch": 0.4862589749938103, + "grad_norm": 3.734298917737308, + "learning_rate": 1.6395297701258038e-06, + "loss": 0.4709, + "step": 11784 + }, + { + "epoch": 0.4863002393331683, + "grad_norm": 5.878572298240344, + "learning_rate": 1.6393301620878264e-06, + "loss": 0.5448, + "step": 11785 + }, + { + "epoch": 0.4863415036725262, + "grad_norm": 2.5827371057480435, + "learning_rate": 1.6391305515610637e-06, + "loss": 0.4813, + "step": 11786 + }, + { + "epoch": 0.4863827680118841, + "grad_norm": 3.1537283872404327, + "learning_rate": 1.6389309385490814e-06, + "loss": 0.5785, + "step": 11787 + }, + { + "epoch": 0.48642403235124204, + "grad_norm": 2.335247653516847, + "learning_rate": 1.6387313230554452e-06, + "loss": 0.5571, + "step": 11788 + }, + { + "epoch": 0.48646529669059996, + "grad_norm": 3.9331921369766873, + "learning_rate": 1.6385317050837205e-06, + "loss": 0.527, + "step": 11789 + }, + { + "epoch": 0.48650656102995793, + "grad_norm": 4.986436312321049, + "learning_rate": 1.6383320846374723e-06, + "loss": 0.5338, + "step": 11790 + }, + { + "epoch": 0.48654782536931585, + "grad_norm": 3.2289999991112746, + "learning_rate": 1.6381324617202676e-06, + "loss": 0.5275, + "step": 11791 + }, + { + "epoch": 0.48658908970867376, + "grad_norm": 3.6488187763432873, + "learning_rate": 1.6379328363356714e-06, + "loss": 0.5056, + "step": 11792 + }, + { + "epoch": 0.4866303540480317, + "grad_norm": 2.8625628543049397, + "learning_rate": 1.63773320848725e-06, + "loss": 0.5323, + "step": 11793 + }, + { + "epoch": 0.4866716183873896, + "grad_norm": 2.72540782549213, + "learning_rate": 1.6375335781785686e-06, + "loss": 0.5338, + "step": 11794 + }, + { + "epoch": 0.48671288272674756, + "grad_norm": 2.7437885656049077, + "learning_rate": 1.6373339454131937e-06, + "loss": 0.5457, + "step": 11795 + }, + { + "epoch": 0.4867541470661055, + "grad_norm": 4.049929626158092, + "learning_rate": 1.6371343101946906e-06, + "loss": 0.6062, + "step": 11796 + }, + { + "epoch": 0.4867954114054634, + "grad_norm": 3.6894221444777235, + "learning_rate": 1.6369346725266262e-06, + "loss": 0.6093, + "step": 11797 + }, + { + "epoch": 0.4868366757448213, + "grad_norm": 2.0062847286528793, + "learning_rate": 1.6367350324125651e-06, + "loss": 0.5047, + "step": 11798 + }, + { + "epoch": 0.48687794008417923, + "grad_norm": 3.128367596871201, + "learning_rate": 1.6365353898560753e-06, + "loss": 0.5302, + "step": 11799 + }, + { + "epoch": 0.4869192044235372, + "grad_norm": 2.3419306805274265, + "learning_rate": 1.636335744860721e-06, + "loss": 0.5007, + "step": 11800 + }, + { + "epoch": 0.4869604687628951, + "grad_norm": 3.2026062828310695, + "learning_rate": 1.6361360974300698e-06, + "loss": 0.5307, + "step": 11801 + }, + { + "epoch": 0.48700173310225303, + "grad_norm": 15.170321615347165, + "learning_rate": 1.6359364475676873e-06, + "loss": 0.488, + "step": 11802 + }, + { + "epoch": 0.48704299744161095, + "grad_norm": 1.7958043543672284, + "learning_rate": 1.6357367952771397e-06, + "loss": 0.5069, + "step": 11803 + }, + { + "epoch": 0.48708426178096886, + "grad_norm": 44.344476553184535, + "learning_rate": 1.6355371405619933e-06, + "loss": 0.5805, + "step": 11804 + }, + { + "epoch": 0.48712552612032683, + "grad_norm": 2.881865589765261, + "learning_rate": 1.6353374834258144e-06, + "loss": 0.5469, + "step": 11805 + }, + { + "epoch": 0.48716679045968475, + "grad_norm": 3.206765764533633, + "learning_rate": 1.6351378238721698e-06, 
+ "loss": 0.4868, + "step": 11806 + }, + { + "epoch": 0.48720805479904267, + "grad_norm": 3.3697827321628204, + "learning_rate": 1.6349381619046255e-06, + "loss": 0.556, + "step": 11807 + }, + { + "epoch": 0.4872493191384006, + "grad_norm": 3.813781337395956, + "learning_rate": 1.634738497526748e-06, + "loss": 0.5189, + "step": 11808 + }, + { + "epoch": 0.4872905834777585, + "grad_norm": 3.5717624126776353, + "learning_rate": 1.6345388307421036e-06, + "loss": 0.5765, + "step": 11809 + }, + { + "epoch": 0.48733184781711647, + "grad_norm": 3.663530987268133, + "learning_rate": 1.6343391615542596e-06, + "loss": 0.5665, + "step": 11810 + }, + { + "epoch": 0.4873731121564744, + "grad_norm": 10.119621258629989, + "learning_rate": 1.634139489966782e-06, + "loss": 0.5541, + "step": 11811 + }, + { + "epoch": 0.4874143764958323, + "grad_norm": 2.5700942426457534, + "learning_rate": 1.6339398159832374e-06, + "loss": 0.5243, + "step": 11812 + }, + { + "epoch": 0.4874556408351902, + "grad_norm": 2.4199136634334426, + "learning_rate": 1.6337401396071923e-06, + "loss": 0.5247, + "step": 11813 + }, + { + "epoch": 0.48749690517454813, + "grad_norm": 4.270571027176735, + "learning_rate": 1.6335404608422147e-06, + "loss": 0.5382, + "step": 11814 + }, + { + "epoch": 0.4875381695139061, + "grad_norm": 4.33752362782438, + "learning_rate": 1.6333407796918693e-06, + "loss": 0.4945, + "step": 11815 + }, + { + "epoch": 0.487579433853264, + "grad_norm": 3.5618426498214215, + "learning_rate": 1.6331410961597249e-06, + "loss": 0.5113, + "step": 11816 + }, + { + "epoch": 0.48762069819262194, + "grad_norm": 13.199648629345345, + "learning_rate": 1.632941410249347e-06, + "loss": 0.5257, + "step": 11817 + }, + { + "epoch": 0.48766196253197985, + "grad_norm": 1.9793306501487489, + "learning_rate": 1.6327417219643028e-06, + "loss": 0.5346, + "step": 11818 + }, + { + "epoch": 0.48770322687133777, + "grad_norm": 4.5428434134078115, + "learning_rate": 1.6325420313081594e-06, + "loss": 0.5222, + "step": 11819 + }, + { + "epoch": 0.48774449121069574, + "grad_norm": 4.196293051664259, + "learning_rate": 1.6323423382844841e-06, + "loss": 0.5568, + "step": 11820 + }, + { + "epoch": 0.48778575555005366, + "grad_norm": 2.269038510307995, + "learning_rate": 1.6321426428968439e-06, + "loss": 0.4927, + "step": 11821 + }, + { + "epoch": 0.4878270198894116, + "grad_norm": 2.8677687194539376, + "learning_rate": 1.6319429451488049e-06, + "loss": 0.4954, + "step": 11822 + }, + { + "epoch": 0.4878682842287695, + "grad_norm": 2.028752966454218, + "learning_rate": 1.6317432450439346e-06, + "loss": 0.5096, + "step": 11823 + }, + { + "epoch": 0.4879095485681274, + "grad_norm": 8.328324414298415, + "learning_rate": 1.631543542585801e-06, + "loss": 0.6096, + "step": 11824 + }, + { + "epoch": 0.4879508129074854, + "grad_norm": 4.037841944250121, + "learning_rate": 1.6313438377779702e-06, + "loss": 0.5568, + "step": 11825 + }, + { + "epoch": 0.4879920772468433, + "grad_norm": 4.75874875862233, + "learning_rate": 1.63114413062401e-06, + "loss": 0.4774, + "step": 11826 + }, + { + "epoch": 0.4880333415862012, + "grad_norm": 2.1682732768936352, + "learning_rate": 1.630944421127488e-06, + "loss": 0.5246, + "step": 11827 + }, + { + "epoch": 0.4880746059255591, + "grad_norm": 9.123247898960878, + "learning_rate": 1.630744709291971e-06, + "loss": 0.5406, + "step": 11828 + }, + { + "epoch": 0.48811587026491704, + "grad_norm": 10.158783712705775, + "learning_rate": 1.6305449951210257e-06, + "loss": 0.5654, + "step": 11829 + }, + { + "epoch": 0.488157134604275, 
+ "grad_norm": 3.379685845251312, + "learning_rate": 1.6303452786182208e-06, + "loss": 0.5242, + "step": 11830 + }, + { + "epoch": 0.4881983989436329, + "grad_norm": 2.515879935652781, + "learning_rate": 1.6301455597871226e-06, + "loss": 0.5177, + "step": 11831 + }, + { + "epoch": 0.48823966328299084, + "grad_norm": 11.46304504038191, + "learning_rate": 1.6299458386312997e-06, + "loss": 0.5226, + "step": 11832 + }, + { + "epoch": 0.48828092762234876, + "grad_norm": 1.9617585116098046, + "learning_rate": 1.6297461151543185e-06, + "loss": 0.4912, + "step": 11833 + }, + { + "epoch": 0.4883221919617067, + "grad_norm": 2.472361983681789, + "learning_rate": 1.6295463893597477e-06, + "loss": 0.5018, + "step": 11834 + }, + { + "epoch": 0.48836345630106465, + "grad_norm": 2.407087554340566, + "learning_rate": 1.6293466612511543e-06, + "loss": 0.5075, + "step": 11835 + }, + { + "epoch": 0.48840472064042256, + "grad_norm": 1.7911776129180461, + "learning_rate": 1.6291469308321059e-06, + "loss": 0.4791, + "step": 11836 + }, + { + "epoch": 0.4884459849797805, + "grad_norm": 4.2767207328474415, + "learning_rate": 1.62894719810617e-06, + "loss": 0.5343, + "step": 11837 + }, + { + "epoch": 0.4884872493191384, + "grad_norm": 5.241879327283829, + "learning_rate": 1.6287474630769145e-06, + "loss": 0.5641, + "step": 11838 + }, + { + "epoch": 0.4885285136584963, + "grad_norm": 2.448114886068009, + "learning_rate": 1.6285477257479072e-06, + "loss": 0.4988, + "step": 11839 + }, + { + "epoch": 0.4885697779978543, + "grad_norm": 3.750360460052943, + "learning_rate": 1.6283479861227164e-06, + "loss": 0.5058, + "step": 11840 + }, + { + "epoch": 0.4886110423372122, + "grad_norm": 2.6811838392028466, + "learning_rate": 1.6281482442049094e-06, + "loss": 0.5229, + "step": 11841 + }, + { + "epoch": 0.4886523066765701, + "grad_norm": 2.6469412265429333, + "learning_rate": 1.6279484999980536e-06, + "loss": 0.5299, + "step": 11842 + }, + { + "epoch": 0.48869357101592803, + "grad_norm": 2.3706841286083558, + "learning_rate": 1.6277487535057179e-06, + "loss": 0.5112, + "step": 11843 + }, + { + "epoch": 0.48873483535528595, + "grad_norm": 3.4860393002342698, + "learning_rate": 1.6275490047314697e-06, + "loss": 0.4984, + "step": 11844 + }, + { + "epoch": 0.48877609969464386, + "grad_norm": 4.442488363497315, + "learning_rate": 1.6273492536788773e-06, + "loss": 0.513, + "step": 11845 + }, + { + "epoch": 0.48881736403400183, + "grad_norm": 4.97346013324061, + "learning_rate": 1.6271495003515092e-06, + "loss": 0.553, + "step": 11846 + }, + { + "epoch": 0.48885862837335975, + "grad_norm": 2.4505594705304854, + "learning_rate": 1.6269497447529325e-06, + "loss": 0.4896, + "step": 11847 + }, + { + "epoch": 0.48889989271271767, + "grad_norm": 2.749801136891664, + "learning_rate": 1.6267499868867157e-06, + "loss": 0.5164, + "step": 11848 + }, + { + "epoch": 0.4889411570520756, + "grad_norm": 2.0749483714037584, + "learning_rate": 1.626550226756427e-06, + "loss": 0.4733, + "step": 11849 + }, + { + "epoch": 0.4889824213914335, + "grad_norm": 3.3687254760830587, + "learning_rate": 1.6263504643656348e-06, + "loss": 0.5285, + "step": 11850 + }, + { + "epoch": 0.48902368573079147, + "grad_norm": 4.611464957132178, + "learning_rate": 1.626150699717907e-06, + "loss": 0.522, + "step": 11851 + }, + { + "epoch": 0.4890649500701494, + "grad_norm": 2.9373792290483287, + "learning_rate": 1.6259509328168124e-06, + "loss": 0.5539, + "step": 11852 + }, + { + "epoch": 0.4891062144095073, + "grad_norm": 2.4982806079785895, + "learning_rate": 
1.6257511636659188e-06, + "loss": 0.5268, + "step": 11853 + }, + { + "epoch": 0.4891474787488652, + "grad_norm": 6.6347100400143395, + "learning_rate": 1.6255513922687955e-06, + "loss": 0.5387, + "step": 11854 + }, + { + "epoch": 0.48918874308822313, + "grad_norm": 3.1545976296429865, + "learning_rate": 1.6253516186290098e-06, + "loss": 0.4981, + "step": 11855 + }, + { + "epoch": 0.4892300074275811, + "grad_norm": 8.924981583787412, + "learning_rate": 1.6251518427501304e-06, + "loss": 0.5811, + "step": 11856 + }, + { + "epoch": 0.489271271766939, + "grad_norm": 2.283828781808568, + "learning_rate": 1.6249520646357263e-06, + "loss": 0.4974, + "step": 11857 + }, + { + "epoch": 0.48931253610629694, + "grad_norm": 3.913202307129191, + "learning_rate": 1.6247522842893653e-06, + "loss": 0.5116, + "step": 11858 + }, + { + "epoch": 0.48935380044565485, + "grad_norm": 2.825079138414789, + "learning_rate": 1.6245525017146168e-06, + "loss": 0.5538, + "step": 11859 + }, + { + "epoch": 0.48939506478501277, + "grad_norm": 1.910073343087645, + "learning_rate": 1.6243527169150493e-06, + "loss": 0.4705, + "step": 11860 + }, + { + "epoch": 0.48943632912437074, + "grad_norm": 7.192209344727441, + "learning_rate": 1.624152929894231e-06, + "loss": 0.5399, + "step": 11861 + }, + { + "epoch": 0.48947759346372866, + "grad_norm": 2.497550756552147, + "learning_rate": 1.6239531406557302e-06, + "loss": 0.5306, + "step": 11862 + }, + { + "epoch": 0.48951885780308657, + "grad_norm": 2.941104332483467, + "learning_rate": 1.623753349203117e-06, + "loss": 0.552, + "step": 11863 + }, + { + "epoch": 0.4895601221424445, + "grad_norm": 3.4719928617401337, + "learning_rate": 1.623553555539959e-06, + "loss": 0.4941, + "step": 11864 + }, + { + "epoch": 0.4896013864818024, + "grad_norm": 3.954855350061876, + "learning_rate": 1.6233537596698257e-06, + "loss": 0.5502, + "step": 11865 + }, + { + "epoch": 0.4896426508211604, + "grad_norm": 3.2363831277670374, + "learning_rate": 1.623153961596285e-06, + "loss": 0.5334, + "step": 11866 + }, + { + "epoch": 0.4896839151605183, + "grad_norm": 5.028475924183722, + "learning_rate": 1.6229541613229074e-06, + "loss": 0.4652, + "step": 11867 + }, + { + "epoch": 0.4897251794998762, + "grad_norm": 2.3133275786321557, + "learning_rate": 1.62275435885326e-06, + "loss": 0.4806, + "step": 11868 + }, + { + "epoch": 0.4897664438392341, + "grad_norm": 2.6690369545359744, + "learning_rate": 1.6225545541909136e-06, + "loss": 0.5461, + "step": 11869 + }, + { + "epoch": 0.48980770817859204, + "grad_norm": 3.7574733276755765, + "learning_rate": 1.6223547473394355e-06, + "loss": 0.6055, + "step": 11870 + }, + { + "epoch": 0.48984897251795, + "grad_norm": 2.0964398072656265, + "learning_rate": 1.622154938302396e-06, + "loss": 0.4777, + "step": 11871 + }, + { + "epoch": 0.4898902368573079, + "grad_norm": 6.3059785980521506, + "learning_rate": 1.6219551270833631e-06, + "loss": 0.5197, + "step": 11872 + }, + { + "epoch": 0.48993150119666584, + "grad_norm": 2.261282798665093, + "learning_rate": 1.6217553136859072e-06, + "loss": 0.4997, + "step": 11873 + }, + { + "epoch": 0.48997276553602376, + "grad_norm": 2.9679785090025956, + "learning_rate": 1.6215554981135966e-06, + "loss": 0.5502, + "step": 11874 + }, + { + "epoch": 0.4900140298753817, + "grad_norm": 2.9664334328580453, + "learning_rate": 1.6213556803700007e-06, + "loss": 0.5102, + "step": 11875 + }, + { + "epoch": 0.49005529421473965, + "grad_norm": 2.926284635791469, + "learning_rate": 1.6211558604586887e-06, + "loss": 0.5303, + "step": 11876 + }, + { + 
"epoch": 0.49009655855409756, + "grad_norm": 7.316806998130062, + "learning_rate": 1.6209560383832301e-06, + "loss": 0.565, + "step": 11877 + }, + { + "epoch": 0.4901378228934555, + "grad_norm": 2.8815168127622064, + "learning_rate": 1.6207562141471938e-06, + "loss": 0.4581, + "step": 11878 + }, + { + "epoch": 0.4901790872328134, + "grad_norm": 3.0490096117354195, + "learning_rate": 1.6205563877541497e-06, + "loss": 0.5708, + "step": 11879 + }, + { + "epoch": 0.4902203515721713, + "grad_norm": 3.9330176840063276, + "learning_rate": 1.6203565592076671e-06, + "loss": 0.4924, + "step": 11880 + }, + { + "epoch": 0.4902616159115293, + "grad_norm": 3.406706110795903, + "learning_rate": 1.6201567285113145e-06, + "loss": 0.5482, + "step": 11881 + }, + { + "epoch": 0.4903028802508872, + "grad_norm": 4.375316623786088, + "learning_rate": 1.6199568956686628e-06, + "loss": 0.529, + "step": 11882 + }, + { + "epoch": 0.4903441445902451, + "grad_norm": 8.92661574022098, + "learning_rate": 1.6197570606832805e-06, + "loss": 0.4857, + "step": 11883 + }, + { + "epoch": 0.49038540892960303, + "grad_norm": 3.0842988039908112, + "learning_rate": 1.619557223558738e-06, + "loss": 0.4979, + "step": 11884 + }, + { + "epoch": 0.49042667326896094, + "grad_norm": 9.893071119566267, + "learning_rate": 1.6193573842986038e-06, + "loss": 0.5389, + "step": 11885 + }, + { + "epoch": 0.4904679376083189, + "grad_norm": 3.464201747795178, + "learning_rate": 1.6191575429064487e-06, + "loss": 0.5872, + "step": 11886 + }, + { + "epoch": 0.49050920194767683, + "grad_norm": 4.294963593170518, + "learning_rate": 1.6189576993858416e-06, + "loss": 0.5087, + "step": 11887 + }, + { + "epoch": 0.49055046628703475, + "grad_norm": 2.414401859249746, + "learning_rate": 1.6187578537403523e-06, + "loss": 0.4937, + "step": 11888 + }, + { + "epoch": 0.49059173062639266, + "grad_norm": 3.9902647086760523, + "learning_rate": 1.6185580059735507e-06, + "loss": 0.5596, + "step": 11889 + }, + { + "epoch": 0.4906329949657506, + "grad_norm": 2.4455411872279114, + "learning_rate": 1.6183581560890065e-06, + "loss": 0.4816, + "step": 11890 + }, + { + "epoch": 0.49067425930510855, + "grad_norm": 3.5773650283163785, + "learning_rate": 1.6181583040902894e-06, + "loss": 0.5689, + "step": 11891 + }, + { + "epoch": 0.49071552364446647, + "grad_norm": 6.576569525018983, + "learning_rate": 1.6179584499809696e-06, + "loss": 0.5229, + "step": 11892 + }, + { + "epoch": 0.4907567879838244, + "grad_norm": 8.93255213527473, + "learning_rate": 1.6177585937646169e-06, + "loss": 0.4856, + "step": 11893 + }, + { + "epoch": 0.4907980523231823, + "grad_norm": 1.8663358284275704, + "learning_rate": 1.6175587354448013e-06, + "loss": 0.4762, + "step": 11894 + }, + { + "epoch": 0.4908393166625402, + "grad_norm": 6.252070149611598, + "learning_rate": 1.617358875025092e-06, + "loss": 0.5846, + "step": 11895 + }, + { + "epoch": 0.4908805810018982, + "grad_norm": 3.5528767376293393, + "learning_rate": 1.61715901250906e-06, + "loss": 0.532, + "step": 11896 + }, + { + "epoch": 0.4909218453412561, + "grad_norm": 4.732485375790162, + "learning_rate": 1.6169591479002743e-06, + "loss": 0.5383, + "step": 11897 + }, + { + "epoch": 0.490963109680614, + "grad_norm": 3.9582629276176413, + "learning_rate": 1.6167592812023065e-06, + "loss": 0.5426, + "step": 11898 + }, + { + "epoch": 0.49100437401997193, + "grad_norm": 2.446128876883777, + "learning_rate": 1.6165594124187253e-06, + "loss": 0.4782, + "step": 11899 + }, + { + "epoch": 0.49104563835932985, + "grad_norm": 2.2695371303301646, + 
"learning_rate": 1.6163595415531021e-06, + "loss": 0.5673, + "step": 11900 + }, + { + "epoch": 0.4910869026986878, + "grad_norm": 2.4678147970901825, + "learning_rate": 1.6161596686090056e-06, + "loss": 0.4772, + "step": 11901 + }, + { + "epoch": 0.49112816703804574, + "grad_norm": 3.1900587221888896, + "learning_rate": 1.6159597935900076e-06, + "loss": 0.5562, + "step": 11902 + }, + { + "epoch": 0.49116943137740365, + "grad_norm": 3.1538475227915397, + "learning_rate": 1.6157599164996767e-06, + "loss": 0.5133, + "step": 11903 + }, + { + "epoch": 0.49121069571676157, + "grad_norm": 3.1536998821397777, + "learning_rate": 1.615560037341585e-06, + "loss": 0.4913, + "step": 11904 + }, + { + "epoch": 0.4912519600561195, + "grad_norm": 5.946021133921016, + "learning_rate": 1.6153601561193011e-06, + "loss": 0.5701, + "step": 11905 + }, + { + "epoch": 0.4912932243954774, + "grad_norm": 4.75528266177663, + "learning_rate": 1.6151602728363973e-06, + "loss": 0.5181, + "step": 11906 + }, + { + "epoch": 0.4913344887348354, + "grad_norm": 3.295389460689323, + "learning_rate": 1.614960387496442e-06, + "loss": 0.5536, + "step": 11907 + }, + { + "epoch": 0.4913757530741933, + "grad_norm": 2.9519844324010873, + "learning_rate": 1.6147605001030069e-06, + "loss": 0.4878, + "step": 11908 + }, + { + "epoch": 0.4914170174135512, + "grad_norm": 16.27392964016582, + "learning_rate": 1.614560610659662e-06, + "loss": 0.5264, + "step": 11909 + }, + { + "epoch": 0.4914582817529091, + "grad_norm": 3.36763853328462, + "learning_rate": 1.614360719169978e-06, + "loss": 0.51, + "step": 11910 + }, + { + "epoch": 0.49149954609226704, + "grad_norm": 3.5605703417087264, + "learning_rate": 1.6141608256375255e-06, + "loss": 0.5203, + "step": 11911 + }, + { + "epoch": 0.491540810431625, + "grad_norm": 2.9235213494786687, + "learning_rate": 1.6139609300658755e-06, + "loss": 0.5787, + "step": 11912 + }, + { + "epoch": 0.4915820747709829, + "grad_norm": 5.006079056497365, + "learning_rate": 1.6137610324585978e-06, + "loss": 0.5618, + "step": 11913 + }, + { + "epoch": 0.49162333911034084, + "grad_norm": 4.52777116725699, + "learning_rate": 1.6135611328192637e-06, + "loss": 0.4919, + "step": 11914 + }, + { + "epoch": 0.49166460344969876, + "grad_norm": 2.597819762458575, + "learning_rate": 1.6133612311514433e-06, + "loss": 0.5828, + "step": 11915 + }, + { + "epoch": 0.49170586778905667, + "grad_norm": 2.975370499248587, + "learning_rate": 1.6131613274587082e-06, + "loss": 0.4999, + "step": 11916 + }, + { + "epoch": 0.49174713212841464, + "grad_norm": 2.581076323203193, + "learning_rate": 1.6129614217446283e-06, + "loss": 0.5505, + "step": 11917 + }, + { + "epoch": 0.49178839646777256, + "grad_norm": 3.899570563340656, + "learning_rate": 1.6127615140127751e-06, + "loss": 0.5348, + "step": 11918 + }, + { + "epoch": 0.4918296608071305, + "grad_norm": 4.126965606877781, + "learning_rate": 1.6125616042667191e-06, + "loss": 0.5516, + "step": 11919 + }, + { + "epoch": 0.4918709251464884, + "grad_norm": 4.920718930941918, + "learning_rate": 1.6123616925100311e-06, + "loss": 0.4789, + "step": 11920 + }, + { + "epoch": 0.4919121894858463, + "grad_norm": 3.790739934774936, + "learning_rate": 1.6121617787462822e-06, + "loss": 0.5684, + "step": 11921 + }, + { + "epoch": 0.4919534538252043, + "grad_norm": 13.582129999363904, + "learning_rate": 1.6119618629790434e-06, + "loss": 0.5127, + "step": 11922 + }, + { + "epoch": 0.4919947181645622, + "grad_norm": 2.2735320901787803, + "learning_rate": 1.611761945211886e-06, + "loss": 0.4984, + "step": 11923 
+ }, + { + "epoch": 0.4920359825039201, + "grad_norm": 4.385339344201771, + "learning_rate": 1.6115620254483802e-06, + "loss": 0.5839, + "step": 11924 + }, + { + "epoch": 0.492077246843278, + "grad_norm": 2.7639268963078254, + "learning_rate": 1.6113621036920977e-06, + "loss": 0.568, + "step": 11925 + }, + { + "epoch": 0.49211851118263594, + "grad_norm": 2.998226480534163, + "learning_rate": 1.6111621799466098e-06, + "loss": 0.5191, + "step": 11926 + }, + { + "epoch": 0.4921597755219939, + "grad_norm": 2.725692199717142, + "learning_rate": 1.610962254215487e-06, + "loss": 0.4876, + "step": 11927 + }, + { + "epoch": 0.49220103986135183, + "grad_norm": 3.050111155830015, + "learning_rate": 1.6107623265023003e-06, + "loss": 0.5561, + "step": 11928 + }, + { + "epoch": 0.49224230420070975, + "grad_norm": 4.972070050567176, + "learning_rate": 1.6105623968106222e-06, + "loss": 0.5163, + "step": 11929 + }, + { + "epoch": 0.49228356854006766, + "grad_norm": 4.310226710066971, + "learning_rate": 1.6103624651440223e-06, + "loss": 0.5542, + "step": 11930 + }, + { + "epoch": 0.4923248328794256, + "grad_norm": 2.0813339023668473, + "learning_rate": 1.6101625315060732e-06, + "loss": 0.5191, + "step": 11931 + }, + { + "epoch": 0.49236609721878355, + "grad_norm": 33.236455308688264, + "learning_rate": 1.6099625959003454e-06, + "loss": 0.4755, + "step": 11932 + }, + { + "epoch": 0.49240736155814147, + "grad_norm": 6.839215726106154, + "learning_rate": 1.6097626583304111e-06, + "loss": 0.561, + "step": 11933 + }, + { + "epoch": 0.4924486258974994, + "grad_norm": 2.7952275331271825, + "learning_rate": 1.6095627187998406e-06, + "loss": 0.551, + "step": 11934 + }, + { + "epoch": 0.4924898902368573, + "grad_norm": 3.8710700354142076, + "learning_rate": 1.609362777312206e-06, + "loss": 0.5613, + "step": 11935 + }, + { + "epoch": 0.4925311545762152, + "grad_norm": 3.3971599492550664, + "learning_rate": 1.6091628338710785e-06, + "loss": 0.5487, + "step": 11936 + }, + { + "epoch": 0.4925724189155732, + "grad_norm": 2.5822954782177066, + "learning_rate": 1.60896288848003e-06, + "loss": 0.5177, + "step": 11937 + }, + { + "epoch": 0.4926136832549311, + "grad_norm": 21.94590980255942, + "learning_rate": 1.6087629411426311e-06, + "loss": 0.513, + "step": 11938 + }, + { + "epoch": 0.492654947594289, + "grad_norm": 3.566186053271997, + "learning_rate": 1.6085629918624553e-06, + "loss": 0.5954, + "step": 11939 + }, + { + "epoch": 0.49269621193364693, + "grad_norm": 15.594479815013559, + "learning_rate": 1.6083630406430718e-06, + "loss": 0.5933, + "step": 11940 + }, + { + "epoch": 0.49273747627300485, + "grad_norm": 7.190241586127286, + "learning_rate": 1.6081630874880533e-06, + "loss": 0.5158, + "step": 11941 + }, + { + "epoch": 0.4927787406123628, + "grad_norm": 2.324887547144993, + "learning_rate": 1.6079631324009718e-06, + "loss": 0.5497, + "step": 11942 + }, + { + "epoch": 0.49282000495172074, + "grad_norm": 3.5701523655389065, + "learning_rate": 1.6077631753853985e-06, + "loss": 0.5535, + "step": 11943 + }, + { + "epoch": 0.49286126929107865, + "grad_norm": 2.6991674521892257, + "learning_rate": 1.6075632164449054e-06, + "loss": 0.5067, + "step": 11944 + }, + { + "epoch": 0.49290253363043657, + "grad_norm": 2.088576016122242, + "learning_rate": 1.607363255583064e-06, + "loss": 0.4795, + "step": 11945 + }, + { + "epoch": 0.4929437979697945, + "grad_norm": 5.73379554019471, + "learning_rate": 1.6071632928034468e-06, + "loss": 0.567, + "step": 11946 + }, + { + "epoch": 0.49298506230915246, + "grad_norm": 3.14126245962455, 
+ "learning_rate": 1.606963328109625e-06, + "loss": 0.5406, + "step": 11947 + }, + { + "epoch": 0.49302632664851037, + "grad_norm": 3.46942192408358, + "learning_rate": 1.6067633615051698e-06, + "loss": 0.5325, + "step": 11948 + }, + { + "epoch": 0.4930675909878683, + "grad_norm": 4.56375129873328, + "learning_rate": 1.6065633929936548e-06, + "loss": 0.512, + "step": 11949 + }, + { + "epoch": 0.4931088553272262, + "grad_norm": 2.9457092632851936, + "learning_rate": 1.6063634225786502e-06, + "loss": 0.5157, + "step": 11950 + }, + { + "epoch": 0.4931501196665841, + "grad_norm": 2.1956443381624906, + "learning_rate": 1.6061634502637294e-06, + "loss": 0.5321, + "step": 11951 + }, + { + "epoch": 0.4931913840059421, + "grad_norm": 7.222177083256402, + "learning_rate": 1.605963476052464e-06, + "loss": 0.4778, + "step": 11952 + }, + { + "epoch": 0.4932326483453, + "grad_norm": 2.7659396774872693, + "learning_rate": 1.6057634999484255e-06, + "loss": 0.5677, + "step": 11953 + }, + { + "epoch": 0.4932739126846579, + "grad_norm": 2.8170668544462165, + "learning_rate": 1.605563521955186e-06, + "loss": 0.5222, + "step": 11954 + }, + { + "epoch": 0.49331517702401584, + "grad_norm": 2.7746831754957615, + "learning_rate": 1.6053635420763183e-06, + "loss": 0.5137, + "step": 11955 + }, + { + "epoch": 0.49335644136337375, + "grad_norm": 3.2757775190207283, + "learning_rate": 1.6051635603153936e-06, + "loss": 0.5312, + "step": 11956 + }, + { + "epoch": 0.4933977057027317, + "grad_norm": 5.576336463655991, + "learning_rate": 1.6049635766759853e-06, + "loss": 0.4943, + "step": 11957 + }, + { + "epoch": 0.49343897004208964, + "grad_norm": 5.162953386981772, + "learning_rate": 1.6047635911616645e-06, + "loss": 0.534, + "step": 11958 + }, + { + "epoch": 0.49348023438144756, + "grad_norm": 2.2504639423374395, + "learning_rate": 1.6045636037760042e-06, + "loss": 0.5056, + "step": 11959 + }, + { + "epoch": 0.4935214987208055, + "grad_norm": 2.609723965717068, + "learning_rate": 1.6043636145225765e-06, + "loss": 0.5631, + "step": 11960 + }, + { + "epoch": 0.4935627630601634, + "grad_norm": 3.5237706844010113, + "learning_rate": 1.604163623404953e-06, + "loss": 0.5514, + "step": 11961 + }, + { + "epoch": 0.49360402739952136, + "grad_norm": 2.686775883655425, + "learning_rate": 1.6039636304267073e-06, + "loss": 0.5527, + "step": 11962 + }, + { + "epoch": 0.4936452917388793, + "grad_norm": 2.1818367155194833, + "learning_rate": 1.6037636355914102e-06, + "loss": 0.5764, + "step": 11963 + }, + { + "epoch": 0.4936865560782372, + "grad_norm": 2.3591121957597045, + "learning_rate": 1.603563638902636e-06, + "loss": 0.5355, + "step": 11964 + }, + { + "epoch": 0.4937278204175951, + "grad_norm": 4.809573970795745, + "learning_rate": 1.6033636403639557e-06, + "loss": 0.6021, + "step": 11965 + }, + { + "epoch": 0.493769084756953, + "grad_norm": 7.41867667054478, + "learning_rate": 1.6031636399789423e-06, + "loss": 0.5095, + "step": 11966 + }, + { + "epoch": 0.49381034909631094, + "grad_norm": 4.160534524174116, + "learning_rate": 1.6029636377511677e-06, + "loss": 0.5477, + "step": 11967 + }, + { + "epoch": 0.4938516134356689, + "grad_norm": 14.096378784648198, + "learning_rate": 1.6027636336842057e-06, + "loss": 0.5281, + "step": 11968 + }, + { + "epoch": 0.49389287777502683, + "grad_norm": 2.6602955278769036, + "learning_rate": 1.6025636277816272e-06, + "loss": 0.5175, + "step": 11969 + }, + { + "epoch": 0.49393414211438474, + "grad_norm": 2.16965000747198, + "learning_rate": 1.6023636200470066e-06, + "loss": 0.5068, + "step": 
11970 + }, + { + "epoch": 0.49397540645374266, + "grad_norm": 4.86121920329545, + "learning_rate": 1.602163610483915e-06, + "loss": 0.5457, + "step": 11971 + }, + { + "epoch": 0.4940166707931006, + "grad_norm": 9.871760251384458, + "learning_rate": 1.6019635990959265e-06, + "loss": 0.5546, + "step": 11972 + }, + { + "epoch": 0.49405793513245855, + "grad_norm": 3.4104272215402367, + "learning_rate": 1.6017635858866125e-06, + "loss": 0.5263, + "step": 11973 + }, + { + "epoch": 0.49409919947181646, + "grad_norm": 3.483737783557211, + "learning_rate": 1.6015635708595466e-06, + "loss": 0.5775, + "step": 11974 + }, + { + "epoch": 0.4941404638111744, + "grad_norm": 2.8286122282469863, + "learning_rate": 1.601363554018301e-06, + "loss": 0.5065, + "step": 11975 + }, + { + "epoch": 0.4941817281505323, + "grad_norm": 4.8703507719111565, + "learning_rate": 1.6011635353664489e-06, + "loss": 0.529, + "step": 11976 + }, + { + "epoch": 0.4942229924898902, + "grad_norm": 3.2608186707699907, + "learning_rate": 1.6009635149075626e-06, + "loss": 0.5424, + "step": 11977 + }, + { + "epoch": 0.4942642568292482, + "grad_norm": 3.0975296469494684, + "learning_rate": 1.6007634926452156e-06, + "loss": 0.543, + "step": 11978 + }, + { + "epoch": 0.4943055211686061, + "grad_norm": 8.183635330538323, + "learning_rate": 1.6005634685829808e-06, + "loss": 0.5627, + "step": 11979 + }, + { + "epoch": 0.494346785507964, + "grad_norm": 3.498683620897084, + "learning_rate": 1.600363442724431e-06, + "loss": 0.4984, + "step": 11980 + }, + { + "epoch": 0.49438804984732193, + "grad_norm": 3.4951377160666923, + "learning_rate": 1.6001634150731387e-06, + "loss": 0.5182, + "step": 11981 + }, + { + "epoch": 0.49442931418667985, + "grad_norm": 2.0740138221852957, + "learning_rate": 1.5999633856326773e-06, + "loss": 0.5089, + "step": 11982 + }, + { + "epoch": 0.4944705785260378, + "grad_norm": 2.193284485957132, + "learning_rate": 1.5997633544066196e-06, + "loss": 0.5245, + "step": 11983 + }, + { + "epoch": 0.49451184286539573, + "grad_norm": 3.639238659210494, + "learning_rate": 1.599563321398539e-06, + "loss": 0.5523, + "step": 11984 + }, + { + "epoch": 0.49455310720475365, + "grad_norm": 4.228391978814756, + "learning_rate": 1.5993632866120083e-06, + "loss": 0.5297, + "step": 11985 + }, + { + "epoch": 0.49459437154411157, + "grad_norm": 1.6773854181744488, + "learning_rate": 1.599163250050601e-06, + "loss": 0.479, + "step": 11986 + }, + { + "epoch": 0.4946356358834695, + "grad_norm": 12.16040840833045, + "learning_rate": 1.5989632117178896e-06, + "loss": 0.4813, + "step": 11987 + }, + { + "epoch": 0.49467690022282745, + "grad_norm": 4.14437189212371, + "learning_rate": 1.5987631716174478e-06, + "loss": 0.5158, + "step": 11988 + }, + { + "epoch": 0.49471816456218537, + "grad_norm": 6.606950688527282, + "learning_rate": 1.5985631297528488e-06, + "loss": 0.5466, + "step": 11989 + }, + { + "epoch": 0.4947594289015433, + "grad_norm": 2.376368242683135, + "learning_rate": 1.5983630861276656e-06, + "loss": 0.5403, + "step": 11990 + }, + { + "epoch": 0.4948006932409012, + "grad_norm": 4.230620051866735, + "learning_rate": 1.5981630407454718e-06, + "loss": 0.5686, + "step": 11991 + }, + { + "epoch": 0.4948419575802591, + "grad_norm": 2.831135266526543, + "learning_rate": 1.5979629936098407e-06, + "loss": 0.5275, + "step": 11992 + }, + { + "epoch": 0.4948832219196171, + "grad_norm": 7.942229239651742, + "learning_rate": 1.5977629447243451e-06, + "loss": 0.4777, + "step": 11993 + }, + { + "epoch": 0.494924486258975, + "grad_norm": 
2.7142748674829393, + "learning_rate": 1.597562894092559e-06, + "loss": 0.5062, + "step": 11994 + }, + { + "epoch": 0.4949657505983329, + "grad_norm": 3.4657126967012766, + "learning_rate": 1.597362841718055e-06, + "loss": 0.5661, + "step": 11995 + }, + { + "epoch": 0.49500701493769084, + "grad_norm": 2.8251376566953734, + "learning_rate": 1.5971627876044077e-06, + "loss": 0.5348, + "step": 11996 + }, + { + "epoch": 0.49504827927704875, + "grad_norm": 2.80131501204515, + "learning_rate": 1.5969627317551895e-06, + "loss": 0.4825, + "step": 11997 + }, + { + "epoch": 0.4950895436164067, + "grad_norm": 3.0910129289862005, + "learning_rate": 1.5967626741739747e-06, + "loss": 0.4807, + "step": 11998 + }, + { + "epoch": 0.49513080795576464, + "grad_norm": 2.6678435276152688, + "learning_rate": 1.5965626148643361e-06, + "loss": 0.561, + "step": 11999 + }, + { + "epoch": 0.49517207229512256, + "grad_norm": 5.047989704652354, + "learning_rate": 1.5963625538298478e-06, + "loss": 0.5245, + "step": 12000 + }, + { + "epoch": 0.49521333663448047, + "grad_norm": 2.543186674769301, + "learning_rate": 1.5961624910740835e-06, + "loss": 0.5603, + "step": 12001 + }, + { + "epoch": 0.4952546009738384, + "grad_norm": 2.978459762322606, + "learning_rate": 1.5959624266006164e-06, + "loss": 0.583, + "step": 12002 + }, + { + "epoch": 0.49529586531319636, + "grad_norm": 2.6891301612081024, + "learning_rate": 1.5957623604130203e-06, + "loss": 0.4713, + "step": 12003 + }, + { + "epoch": 0.4953371296525543, + "grad_norm": 2.225979741284969, + "learning_rate": 1.5955622925148688e-06, + "loss": 0.5479, + "step": 12004 + }, + { + "epoch": 0.4953783939919122, + "grad_norm": 2.744883359293944, + "learning_rate": 1.5953622229097358e-06, + "loss": 0.5402, + "step": 12005 + }, + { + "epoch": 0.4954196583312701, + "grad_norm": 2.304383420209641, + "learning_rate": 1.5951621516011946e-06, + "loss": 0.4684, + "step": 12006 + }, + { + "epoch": 0.495460922670628, + "grad_norm": 3.9272978194265553, + "learning_rate": 1.5949620785928198e-06, + "loss": 0.5643, + "step": 12007 + }, + { + "epoch": 0.495502187009986, + "grad_norm": 2.933481944177934, + "learning_rate": 1.594762003888184e-06, + "loss": 0.5663, + "step": 12008 + }, + { + "epoch": 0.4955434513493439, + "grad_norm": 3.973552578782913, + "learning_rate": 1.5945619274908626e-06, + "loss": 0.5867, + "step": 12009 + }, + { + "epoch": 0.4955847156887018, + "grad_norm": 2.2281057844861323, + "learning_rate": 1.5943618494044279e-06, + "loss": 0.5663, + "step": 12010 + }, + { + "epoch": 0.49562598002805974, + "grad_norm": 2.7219206723414846, + "learning_rate": 1.594161769632455e-06, + "loss": 0.5284, + "step": 12011 + }, + { + "epoch": 0.49566724436741766, + "grad_norm": 2.5018658501468143, + "learning_rate": 1.5939616881785173e-06, + "loss": 0.5336, + "step": 12012 + }, + { + "epoch": 0.49570850870677563, + "grad_norm": 3.379852851156875, + "learning_rate": 1.593761605046189e-06, + "loss": 0.5145, + "step": 12013 + }, + { + "epoch": 0.49574977304613355, + "grad_norm": 4.147167216713485, + "learning_rate": 1.5935615202390435e-06, + "loss": 0.4963, + "step": 12014 + }, + { + "epoch": 0.49579103738549146, + "grad_norm": 2.60801964734521, + "learning_rate": 1.5933614337606553e-06, + "loss": 0.5476, + "step": 12015 + }, + { + "epoch": 0.4958323017248494, + "grad_norm": 4.23722591575827, + "learning_rate": 1.593161345614598e-06, + "loss": 0.4984, + "step": 12016 + }, + { + "epoch": 0.4958735660642073, + "grad_norm": 3.3979966513020745, + "learning_rate": 1.5929612558044467e-06, + "loss": 
0.5271, + "step": 12017 + }, + { + "epoch": 0.49591483040356527, + "grad_norm": 3.6630833683277833, + "learning_rate": 1.5927611643337744e-06, + "loss": 0.4574, + "step": 12018 + }, + { + "epoch": 0.4959560947429232, + "grad_norm": 2.0900036506044137, + "learning_rate": 1.5925610712061557e-06, + "loss": 0.4968, + "step": 12019 + }, + { + "epoch": 0.4959973590822811, + "grad_norm": 3.06035229109154, + "learning_rate": 1.5923609764251647e-06, + "loss": 0.4895, + "step": 12020 + }, + { + "epoch": 0.496038623421639, + "grad_norm": 4.949840694421472, + "learning_rate": 1.5921608799943756e-06, + "loss": 0.532, + "step": 12021 + }, + { + "epoch": 0.49607988776099693, + "grad_norm": 8.028512550184743, + "learning_rate": 1.5919607819173624e-06, + "loss": 0.5188, + "step": 12022 + }, + { + "epoch": 0.4961211521003549, + "grad_norm": 2.803275426649413, + "learning_rate": 1.5917606821977e-06, + "loss": 0.5761, + "step": 12023 + }, + { + "epoch": 0.4961624164397128, + "grad_norm": 3.4010314012500973, + "learning_rate": 1.5915605808389619e-06, + "loss": 0.5442, + "step": 12024 + }, + { + "epoch": 0.49620368077907073, + "grad_norm": 2.2922971180154414, + "learning_rate": 1.5913604778447235e-06, + "loss": 0.5211, + "step": 12025 + }, + { + "epoch": 0.49624494511842865, + "grad_norm": 2.726722716709618, + "learning_rate": 1.5911603732185578e-06, + "loss": 0.5644, + "step": 12026 + }, + { + "epoch": 0.49628620945778656, + "grad_norm": 2.815755606499152, + "learning_rate": 1.59096026696404e-06, + "loss": 0.5346, + "step": 12027 + }, + { + "epoch": 0.4963274737971445, + "grad_norm": 4.956551684960512, + "learning_rate": 1.5907601590847438e-06, + "loss": 0.5118, + "step": 12028 + }, + { + "epoch": 0.49636873813650245, + "grad_norm": 4.118482538199039, + "learning_rate": 1.5905600495842445e-06, + "loss": 0.5383, + "step": 12029 + }, + { + "epoch": 0.49641000247586037, + "grad_norm": 10.15601781966438, + "learning_rate": 1.590359938466116e-06, + "loss": 0.53, + "step": 12030 + }, + { + "epoch": 0.4964512668152183, + "grad_norm": 2.1519236893085423, + "learning_rate": 1.5901598257339336e-06, + "loss": 0.5376, + "step": 12031 + }, + { + "epoch": 0.4964925311545762, + "grad_norm": 3.0158024196202566, + "learning_rate": 1.5899597113912707e-06, + "loss": 0.4739, + "step": 12032 + }, + { + "epoch": 0.4965337954939341, + "grad_norm": 3.141891230732995, + "learning_rate": 1.589759595441702e-06, + "loss": 0.556, + "step": 12033 + }, + { + "epoch": 0.4965750598332921, + "grad_norm": 2.637182111814943, + "learning_rate": 1.5895594778888025e-06, + "loss": 0.4794, + "step": 12034 + }, + { + "epoch": 0.49661632417265, + "grad_norm": 3.999259105053777, + "learning_rate": 1.5893593587361472e-06, + "loss": 0.5734, + "step": 12035 + }, + { + "epoch": 0.4966575885120079, + "grad_norm": 5.169045370457153, + "learning_rate": 1.5891592379873097e-06, + "loss": 0.5129, + "step": 12036 + }, + { + "epoch": 0.49669885285136584, + "grad_norm": 3.0115470198927694, + "learning_rate": 1.5889591156458653e-06, + "loss": 0.4982, + "step": 12037 + }, + { + "epoch": 0.49674011719072375, + "grad_norm": 2.9920385192159005, + "learning_rate": 1.5887589917153885e-06, + "loss": 0.5563, + "step": 12038 + }, + { + "epoch": 0.4967813815300817, + "grad_norm": 3.145802490723895, + "learning_rate": 1.5885588661994542e-06, + "loss": 0.5456, + "step": 12039 + }, + { + "epoch": 0.49682264586943964, + "grad_norm": 4.376815549393989, + "learning_rate": 1.588358739101637e-06, + "loss": 0.5141, + "step": 12040 + }, + { + "epoch": 0.49686391020879755, + "grad_norm": 
2.4496452254428758, + "learning_rate": 1.5881586104255113e-06, + "loss": 0.5468, + "step": 12041 + }, + { + "epoch": 0.49690517454815547, + "grad_norm": 3.659630096706318, + "learning_rate": 1.587958480174653e-06, + "loss": 0.5343, + "step": 12042 + }, + { + "epoch": 0.4969464388875134, + "grad_norm": 2.859422129444688, + "learning_rate": 1.5877583483526359e-06, + "loss": 0.5302, + "step": 12043 + }, + { + "epoch": 0.49698770322687136, + "grad_norm": 2.3694190795354997, + "learning_rate": 1.5875582149630348e-06, + "loss": 0.5561, + "step": 12044 + }, + { + "epoch": 0.4970289675662293, + "grad_norm": 2.452469370080687, + "learning_rate": 1.5873580800094254e-06, + "loss": 0.5368, + "step": 12045 + }, + { + "epoch": 0.4970702319055872, + "grad_norm": 2.7148667582380313, + "learning_rate": 1.587157943495382e-06, + "loss": 0.5392, + "step": 12046 + }, + { + "epoch": 0.4971114962449451, + "grad_norm": 2.539276027355404, + "learning_rate": 1.5869578054244793e-06, + "loss": 0.5241, + "step": 12047 + }, + { + "epoch": 0.497152760584303, + "grad_norm": 2.8797333171289474, + "learning_rate": 1.5867576658002933e-06, + "loss": 0.5203, + "step": 12048 + }, + { + "epoch": 0.497194024923661, + "grad_norm": 3.0725502287440154, + "learning_rate": 1.5865575246263979e-06, + "loss": 0.555, + "step": 12049 + }, + { + "epoch": 0.4972352892630189, + "grad_norm": 3.316394004233442, + "learning_rate": 1.5863573819063687e-06, + "loss": 0.5241, + "step": 12050 + }, + { + "epoch": 0.4972765536023768, + "grad_norm": 2.6854873149256218, + "learning_rate": 1.5861572376437808e-06, + "loss": 0.5245, + "step": 12051 + }, + { + "epoch": 0.49731781794173474, + "grad_norm": 3.3770303074045325, + "learning_rate": 1.5859570918422091e-06, + "loss": 0.5108, + "step": 12052 + }, + { + "epoch": 0.49735908228109266, + "grad_norm": 2.5061302239074315, + "learning_rate": 1.5857569445052285e-06, + "loss": 0.5066, + "step": 12053 + }, + { + "epoch": 0.49740034662045063, + "grad_norm": 2.593350942214341, + "learning_rate": 1.5855567956364146e-06, + "loss": 0.557, + "step": 12054 + }, + { + "epoch": 0.49744161095980854, + "grad_norm": 4.829071431342663, + "learning_rate": 1.5853566452393417e-06, + "loss": 0.5586, + "step": 12055 + }, + { + "epoch": 0.49748287529916646, + "grad_norm": 2.522563007734888, + "learning_rate": 1.5851564933175862e-06, + "loss": 0.5073, + "step": 12056 + }, + { + "epoch": 0.4975241396385244, + "grad_norm": 3.6811989928269937, + "learning_rate": 1.5849563398747222e-06, + "loss": 0.5378, + "step": 12057 + }, + { + "epoch": 0.4975654039778823, + "grad_norm": 2.3616676169430004, + "learning_rate": 1.5847561849143265e-06, + "loss": 0.4975, + "step": 12058 + }, + { + "epoch": 0.49760666831724026, + "grad_norm": 3.1572045604590206, + "learning_rate": 1.5845560284399722e-06, + "loss": 0.516, + "step": 12059 + }, + { + "epoch": 0.4976479326565982, + "grad_norm": 2.651360954467136, + "learning_rate": 1.5843558704552364e-06, + "loss": 0.5029, + "step": 12060 + }, + { + "epoch": 0.4976891969959561, + "grad_norm": 3.323660659212497, + "learning_rate": 1.584155710963693e-06, + "loss": 0.5064, + "step": 12061 + }, + { + "epoch": 0.497730461335314, + "grad_norm": 2.0412362745456862, + "learning_rate": 1.5839555499689187e-06, + "loss": 0.4845, + "step": 12062 + }, + { + "epoch": 0.4977717256746719, + "grad_norm": 4.26761603617805, + "learning_rate": 1.5837553874744876e-06, + "loss": 0.5517, + "step": 12063 + }, + { + "epoch": 0.4978129900140299, + "grad_norm": 3.830839449953017, + "learning_rate": 1.583555223483977e-06, + "loss": 
0.5533, + "step": 12064 + }, + { + "epoch": 0.4978542543533878, + "grad_norm": 2.916349091683663, + "learning_rate": 1.5833550580009599e-06, + "loss": 0.5147, + "step": 12065 + }, + { + "epoch": 0.49789551869274573, + "grad_norm": 2.675131760790399, + "learning_rate": 1.5831548910290133e-06, + "loss": 0.5691, + "step": 12066 + }, + { + "epoch": 0.49793678303210365, + "grad_norm": 2.762040200043209, + "learning_rate": 1.5829547225717123e-06, + "loss": 0.5227, + "step": 12067 + }, + { + "epoch": 0.49797804737146156, + "grad_norm": 2.9497845278999457, + "learning_rate": 1.5827545526326324e-06, + "loss": 0.5936, + "step": 12068 + }, + { + "epoch": 0.49801931171081953, + "grad_norm": 7.66475906198592, + "learning_rate": 1.5825543812153491e-06, + "loss": 0.5086, + "step": 12069 + }, + { + "epoch": 0.49806057605017745, + "grad_norm": 2.115392699529232, + "learning_rate": 1.5823542083234383e-06, + "loss": 0.5041, + "step": 12070 + }, + { + "epoch": 0.49810184038953537, + "grad_norm": 4.883367858646039, + "learning_rate": 1.5821540339604753e-06, + "loss": 0.5297, + "step": 12071 + }, + { + "epoch": 0.4981431047288933, + "grad_norm": 18.6097912322099, + "learning_rate": 1.5819538581300358e-06, + "loss": 0.6202, + "step": 12072 + }, + { + "epoch": 0.4981843690682512, + "grad_norm": 2.5290187249273837, + "learning_rate": 1.581753680835695e-06, + "loss": 0.517, + "step": 12073 + }, + { + "epoch": 0.49822563340760917, + "grad_norm": 92.7923614426204, + "learning_rate": 1.5815535020810292e-06, + "loss": 0.5927, + "step": 12074 + }, + { + "epoch": 0.4982668977469671, + "grad_norm": 2.3972702956263485, + "learning_rate": 1.5813533218696138e-06, + "loss": 0.5142, + "step": 12075 + }, + { + "epoch": 0.498308162086325, + "grad_norm": 2.436402386230752, + "learning_rate": 1.5811531402050244e-06, + "loss": 0.5229, + "step": 12076 + }, + { + "epoch": 0.4983494264256829, + "grad_norm": 5.061011919941434, + "learning_rate": 1.5809529570908374e-06, + "loss": 0.5763, + "step": 12077 + }, + { + "epoch": 0.49839069076504083, + "grad_norm": 2.400106494483161, + "learning_rate": 1.5807527725306276e-06, + "loss": 0.5212, + "step": 12078 + }, + { + "epoch": 0.4984319551043988, + "grad_norm": 12.81922836544979, + "learning_rate": 1.5805525865279712e-06, + "loss": 0.5252, + "step": 12079 + }, + { + "epoch": 0.4984732194437567, + "grad_norm": 4.048190884773931, + "learning_rate": 1.5803523990864443e-06, + "loss": 0.5219, + "step": 12080 + }, + { + "epoch": 0.49851448378311464, + "grad_norm": 2.7671713375847493, + "learning_rate": 1.5801522102096224e-06, + "loss": 0.5332, + "step": 12081 + }, + { + "epoch": 0.49855574812247255, + "grad_norm": 5.834768580660102, + "learning_rate": 1.579952019901082e-06, + "loss": 0.5212, + "step": 12082 + }, + { + "epoch": 0.49859701246183047, + "grad_norm": 2.9016603814738238, + "learning_rate": 1.5797518281643977e-06, + "loss": 0.5814, + "step": 12083 + }, + { + "epoch": 0.49863827680118844, + "grad_norm": 3.985523882273675, + "learning_rate": 1.5795516350031472e-06, + "loss": 0.4868, + "step": 12084 + }, + { + "epoch": 0.49867954114054636, + "grad_norm": 10.134916676123323, + "learning_rate": 1.5793514404209048e-06, + "loss": 0.4948, + "step": 12085 + }, + { + "epoch": 0.49872080547990427, + "grad_norm": 3.0394256171297993, + "learning_rate": 1.5791512444212473e-06, + "loss": 0.5255, + "step": 12086 + }, + { + "epoch": 0.4987620698192622, + "grad_norm": 3.715982972766519, + "learning_rate": 1.5789510470077508e-06, + "loss": 0.5828, + "step": 12087 + }, + { + "epoch": 0.4988033341586201, + 
"grad_norm": 26.23096663693443, + "learning_rate": 1.5787508481839908e-06, + "loss": 0.4971, + "step": 12088 + }, + { + "epoch": 0.498844598497978, + "grad_norm": 2.6005126459982786, + "learning_rate": 1.5785506479535436e-06, + "loss": 0.5176, + "step": 12089 + }, + { + "epoch": 0.498885862837336, + "grad_norm": 2.537544535031667, + "learning_rate": 1.5783504463199858e-06, + "loss": 0.5097, + "step": 12090 + }, + { + "epoch": 0.4989271271766939, + "grad_norm": 2.1205182357813697, + "learning_rate": 1.5781502432868928e-06, + "loss": 0.5201, + "step": 12091 + }, + { + "epoch": 0.4989683915160518, + "grad_norm": 2.806089150754459, + "learning_rate": 1.5779500388578407e-06, + "loss": 0.5035, + "step": 12092 + }, + { + "epoch": 0.49900965585540974, + "grad_norm": 2.893940788107622, + "learning_rate": 1.577749833036406e-06, + "loss": 0.6048, + "step": 12093 + }, + { + "epoch": 0.49905092019476766, + "grad_norm": 2.587344799718986, + "learning_rate": 1.5775496258261647e-06, + "loss": 0.5165, + "step": 12094 + }, + { + "epoch": 0.4990921845341256, + "grad_norm": 2.2112541698823245, + "learning_rate": 1.5773494172306934e-06, + "loss": 0.5281, + "step": 12095 + }, + { + "epoch": 0.49913344887348354, + "grad_norm": 5.87369820050023, + "learning_rate": 1.5771492072535678e-06, + "loss": 0.5343, + "step": 12096 + }, + { + "epoch": 0.49917471321284146, + "grad_norm": 2.856388614604103, + "learning_rate": 1.5769489958983647e-06, + "loss": 0.5428, + "step": 12097 + }, + { + "epoch": 0.4992159775521994, + "grad_norm": 2.9185294385698644, + "learning_rate": 1.5767487831686596e-06, + "loss": 0.5534, + "step": 12098 + }, + { + "epoch": 0.4992572418915573, + "grad_norm": 1.7303180502672655, + "learning_rate": 1.5765485690680294e-06, + "loss": 0.4678, + "step": 12099 + }, + { + "epoch": 0.49929850623091526, + "grad_norm": 2.198016151964185, + "learning_rate": 1.57634835360005e-06, + "loss": 0.5527, + "step": 12100 + }, + { + "epoch": 0.4993397705702732, + "grad_norm": 17.38351172997126, + "learning_rate": 1.5761481367682984e-06, + "loss": 0.5585, + "step": 12101 + }, + { + "epoch": 0.4993810349096311, + "grad_norm": 4.035003941261131, + "learning_rate": 1.5759479185763502e-06, + "loss": 0.5046, + "step": 12102 + }, + { + "epoch": 0.499422299248989, + "grad_norm": 2.32673939251777, + "learning_rate": 1.5757476990277826e-06, + "loss": 0.5089, + "step": 12103 + }, + { + "epoch": 0.4994635635883469, + "grad_norm": 6.58536418716299, + "learning_rate": 1.5755474781261715e-06, + "loss": 0.5947, + "step": 12104 + }, + { + "epoch": 0.4995048279277049, + "grad_norm": 1.8769491555107727, + "learning_rate": 1.5753472558750934e-06, + "loss": 0.5961, + "step": 12105 + }, + { + "epoch": 0.4995460922670628, + "grad_norm": 3.172547071890499, + "learning_rate": 1.5751470322781249e-06, + "loss": 0.5141, + "step": 12106 + }, + { + "epoch": 0.49958735660642073, + "grad_norm": 2.5419238838472134, + "learning_rate": 1.5749468073388424e-06, + "loss": 0.4997, + "step": 12107 + }, + { + "epoch": 0.49962862094577865, + "grad_norm": 2.5687917542872536, + "learning_rate": 1.574746581060822e-06, + "loss": 0.5593, + "step": 12108 + }, + { + "epoch": 0.49966988528513656, + "grad_norm": 11.001006647451089, + "learning_rate": 1.574546353447641e-06, + "loss": 0.5414, + "step": 12109 + }, + { + "epoch": 0.49971114962449453, + "grad_norm": 17.0775737035451, + "learning_rate": 1.574346124502876e-06, + "loss": 0.5515, + "step": 12110 + }, + { + "epoch": 0.49975241396385245, + "grad_norm": 1.9455907403217096, + "learning_rate": 1.5741458942301028e-06, 
+ "loss": 0.516, + "step": 12111 + }, + { + "epoch": 0.49979367830321036, + "grad_norm": 2.9565051327586196, + "learning_rate": 1.5739456626328985e-06, + "loss": 0.491, + "step": 12112 + }, + { + "epoch": 0.4998349426425683, + "grad_norm": 4.541305415308936, + "learning_rate": 1.57374542971484e-06, + "loss": 0.5414, + "step": 12113 + }, + { + "epoch": 0.4998762069819262, + "grad_norm": 6.563063355339075, + "learning_rate": 1.5735451954795028e-06, + "loss": 0.4917, + "step": 12114 + }, + { + "epoch": 0.49991747132128417, + "grad_norm": 2.5150443264459112, + "learning_rate": 1.5733449599304652e-06, + "loss": 0.5431, + "step": 12115 + }, + { + "epoch": 0.4999587356606421, + "grad_norm": 2.383542831729585, + "learning_rate": 1.5731447230713025e-06, + "loss": 0.4962, + "step": 12116 + }, + { + "epoch": 0.5, + "grad_norm": 2.242055626571969, + "learning_rate": 1.5729444849055928e-06, + "loss": 0.5506, + "step": 12117 + }, + { + "epoch": 0.5000412643393579, + "grad_norm": 3.5176723653357733, + "learning_rate": 1.572744245436912e-06, + "loss": 0.5509, + "step": 12118 + }, + { + "epoch": 0.5000825286787158, + "grad_norm": 3.0893293438128477, + "learning_rate": 1.5725440046688364e-06, + "loss": 0.5517, + "step": 12119 + }, + { + "epoch": 0.5001237930180737, + "grad_norm": 2.2963781258165477, + "learning_rate": 1.572343762604944e-06, + "loss": 0.517, + "step": 12120 + }, + { + "epoch": 0.5001650573574317, + "grad_norm": 5.0747117170464255, + "learning_rate": 1.572143519248811e-06, + "loss": 0.5454, + "step": 12121 + }, + { + "epoch": 0.5002063216967896, + "grad_norm": 3.365600666296817, + "learning_rate": 1.571943274604014e-06, + "loss": 0.5269, + "step": 12122 + }, + { + "epoch": 0.5002475860361476, + "grad_norm": 3.2324710015621725, + "learning_rate": 1.5717430286741302e-06, + "loss": 0.5005, + "step": 12123 + }, + { + "epoch": 0.5002888503755055, + "grad_norm": 4.38061152254972, + "learning_rate": 1.5715427814627366e-06, + "loss": 0.4591, + "step": 12124 + }, + { + "epoch": 0.5003301147148634, + "grad_norm": 2.552796572208584, + "learning_rate": 1.5713425329734099e-06, + "loss": 0.5241, + "step": 12125 + }, + { + "epoch": 0.5003713790542214, + "grad_norm": 3.172118847346865, + "learning_rate": 1.5711422832097267e-06, + "loss": 0.5963, + "step": 12126 + }, + { + "epoch": 0.5004126433935793, + "grad_norm": 2.3179793255248096, + "learning_rate": 1.5709420321752645e-06, + "loss": 0.51, + "step": 12127 + }, + { + "epoch": 0.5004539077329372, + "grad_norm": 2.3327820236746906, + "learning_rate": 1.5707417798736007e-06, + "loss": 0.5636, + "step": 12128 + }, + { + "epoch": 0.5004951720722951, + "grad_norm": 3.15496945768113, + "learning_rate": 1.570541526308311e-06, + "loss": 0.4753, + "step": 12129 + }, + { + "epoch": 0.500536436411653, + "grad_norm": 3.3601075240642833, + "learning_rate": 1.570341271482974e-06, + "loss": 0.5419, + "step": 12130 + }, + { + "epoch": 0.5005777007510109, + "grad_norm": 2.491229248978129, + "learning_rate": 1.570141015401165e-06, + "loss": 0.5114, + "step": 12131 + }, + { + "epoch": 0.5006189650903689, + "grad_norm": 5.866486877134168, + "learning_rate": 1.569940758066463e-06, + "loss": 0.5265, + "step": 12132 + }, + { + "epoch": 0.5006602294297269, + "grad_norm": 2.6249346136164333, + "learning_rate": 1.5697404994824431e-06, + "loss": 0.5382, + "step": 12133 + }, + { + "epoch": 0.5007014937690848, + "grad_norm": 2.5018301041468125, + "learning_rate": 1.569540239652684e-06, + "loss": 0.5329, + "step": 12134 + }, + { + "epoch": 0.5007427581084427, + "grad_norm": 
3.4101733536858516, + "learning_rate": 1.569339978580762e-06, + "loss": 0.5047, + "step": 12135 + }, + { + "epoch": 0.5007840224478006, + "grad_norm": 4.02194743384007, + "learning_rate": 1.569139716270255e-06, + "loss": 0.518, + "step": 12136 + }, + { + "epoch": 0.5008252867871585, + "grad_norm": 6.800006913764468, + "learning_rate": 1.5689394527247388e-06, + "loss": 0.4915, + "step": 12137 + }, + { + "epoch": 0.5008665511265165, + "grad_norm": 2.278243139852169, + "learning_rate": 1.5687391879477922e-06, + "loss": 0.5098, + "step": 12138 + }, + { + "epoch": 0.5009078154658744, + "grad_norm": 2.8702189018921715, + "learning_rate": 1.5685389219429913e-06, + "loss": 0.5665, + "step": 12139 + }, + { + "epoch": 0.5009490798052323, + "grad_norm": 4.416309244146234, + "learning_rate": 1.5683386547139146e-06, + "loss": 0.5274, + "step": 12140 + }, + { + "epoch": 0.5009903441445902, + "grad_norm": 3.8645079159299187, + "learning_rate": 1.5681383862641379e-06, + "loss": 0.5504, + "step": 12141 + }, + { + "epoch": 0.5010316084839481, + "grad_norm": 2.7573818886687262, + "learning_rate": 1.5679381165972394e-06, + "loss": 0.4837, + "step": 12142 + }, + { + "epoch": 0.5010728728233061, + "grad_norm": 2.4286621063063825, + "learning_rate": 1.567737845716796e-06, + "loss": 0.551, + "step": 12143 + }, + { + "epoch": 0.5011141371626641, + "grad_norm": 3.45216644461374, + "learning_rate": 1.5675375736263856e-06, + "loss": 0.5104, + "step": 12144 + }, + { + "epoch": 0.501155401502022, + "grad_norm": 1.9636274749454299, + "learning_rate": 1.5673373003295848e-06, + "loss": 0.52, + "step": 12145 + }, + { + "epoch": 0.5011966658413799, + "grad_norm": 5.300280428251232, + "learning_rate": 1.5671370258299713e-06, + "loss": 0.5192, + "step": 12146 + }, + { + "epoch": 0.5012379301807378, + "grad_norm": 2.8162042525640882, + "learning_rate": 1.5669367501311225e-06, + "loss": 0.5555, + "step": 12147 + }, + { + "epoch": 0.5012791945200957, + "grad_norm": 2.618410411270882, + "learning_rate": 1.5667364732366164e-06, + "loss": 0.4767, + "step": 12148 + }, + { + "epoch": 0.5013204588594536, + "grad_norm": 4.800397591833809, + "learning_rate": 1.5665361951500295e-06, + "loss": 0.5118, + "step": 12149 + }, + { + "epoch": 0.5013617231988116, + "grad_norm": 3.6906634080960634, + "learning_rate": 1.5663359158749402e-06, + "loss": 0.5026, + "step": 12150 + }, + { + "epoch": 0.5014029875381695, + "grad_norm": 3.148197980283428, + "learning_rate": 1.566135635414925e-06, + "loss": 0.5599, + "step": 12151 + }, + { + "epoch": 0.5014442518775274, + "grad_norm": 2.6442814334558453, + "learning_rate": 1.565935353773562e-06, + "loss": 0.5876, + "step": 12152 + }, + { + "epoch": 0.5014855162168854, + "grad_norm": 12.318321517292821, + "learning_rate": 1.5657350709544287e-06, + "loss": 0.5061, + "step": 12153 + }, + { + "epoch": 0.5015267805562433, + "grad_norm": 6.467194156833627, + "learning_rate": 1.565534786961103e-06, + "loss": 0.5136, + "step": 12154 + }, + { + "epoch": 0.5015680448956013, + "grad_norm": 2.4154416738188, + "learning_rate": 1.565334501797161e-06, + "loss": 0.5506, + "step": 12155 + }, + { + "epoch": 0.5016093092349592, + "grad_norm": 2.2891080231195944, + "learning_rate": 1.5651342154661826e-06, + "loss": 0.567, + "step": 12156 + }, + { + "epoch": 0.5016505735743171, + "grad_norm": 5.227553149253717, + "learning_rate": 1.5649339279717437e-06, + "loss": 0.5072, + "step": 12157 + }, + { + "epoch": 0.501691837913675, + "grad_norm": 7.456767917765522, + "learning_rate": 1.5647336393174218e-06, + "loss": 0.5238, + 
"step": 12158 + }, + { + "epoch": 0.5017331022530329, + "grad_norm": 3.881629873323189, + "learning_rate": 1.5645333495067957e-06, + "loss": 0.571, + "step": 12159 + }, + { + "epoch": 0.5017743665923908, + "grad_norm": 26.58875996386722, + "learning_rate": 1.5643330585434426e-06, + "loss": 0.5285, + "step": 12160 + }, + { + "epoch": 0.5018156309317487, + "grad_norm": 4.1800752748275185, + "learning_rate": 1.56413276643094e-06, + "loss": 0.5744, + "step": 12161 + }, + { + "epoch": 0.5018568952711067, + "grad_norm": 28.25021575779661, + "learning_rate": 1.563932473172866e-06, + "loss": 0.5195, + "step": 12162 + }, + { + "epoch": 0.5018981596104647, + "grad_norm": 3.870728830863982, + "learning_rate": 1.5637321787727981e-06, + "loss": 0.5566, + "step": 12163 + }, + { + "epoch": 0.5019394239498226, + "grad_norm": 2.8637371855495624, + "learning_rate": 1.5635318832343133e-06, + "loss": 0.5178, + "step": 12164 + }, + { + "epoch": 0.5019806882891805, + "grad_norm": 2.8933434958503432, + "learning_rate": 1.563331586560991e-06, + "loss": 0.5372, + "step": 12165 + }, + { + "epoch": 0.5020219526285384, + "grad_norm": 3.282075637189376, + "learning_rate": 1.5631312887564075e-06, + "loss": 0.5421, + "step": 12166 + }, + { + "epoch": 0.5020632169678964, + "grad_norm": 3.525925266565893, + "learning_rate": 1.5629309898241416e-06, + "loss": 0.497, + "step": 12167 + }, + { + "epoch": 0.5021044813072543, + "grad_norm": 3.3438174440941366, + "learning_rate": 1.5627306897677705e-06, + "loss": 0.5498, + "step": 12168 + }, + { + "epoch": 0.5021457456466122, + "grad_norm": 4.203067543562982, + "learning_rate": 1.5625303885908732e-06, + "loss": 0.5591, + "step": 12169 + }, + { + "epoch": 0.5021870099859701, + "grad_norm": 3.7932188816360126, + "learning_rate": 1.5623300862970258e-06, + "loss": 0.5357, + "step": 12170 + }, + { + "epoch": 0.502228274325328, + "grad_norm": 2.56133790331132, + "learning_rate": 1.5621297828898076e-06, + "loss": 0.507, + "step": 12171 + }, + { + "epoch": 0.5022695386646859, + "grad_norm": 3.539925976557371, + "learning_rate": 1.561929478372796e-06, + "loss": 0.5456, + "step": 12172 + }, + { + "epoch": 0.502310803004044, + "grad_norm": 5.491984131337474, + "learning_rate": 1.5617291727495688e-06, + "loss": 0.4962, + "step": 12173 + }, + { + "epoch": 0.5023520673434019, + "grad_norm": 2.731870540730423, + "learning_rate": 1.5615288660237043e-06, + "loss": 0.5687, + "step": 12174 + }, + { + "epoch": 0.5023933316827598, + "grad_norm": 3.804546284303045, + "learning_rate": 1.5613285581987805e-06, + "loss": 0.5665, + "step": 12175 + }, + { + "epoch": 0.5024345960221177, + "grad_norm": 3.7004893241347667, + "learning_rate": 1.561128249278375e-06, + "loss": 0.5291, + "step": 12176 + }, + { + "epoch": 0.5024758603614756, + "grad_norm": 4.867820661270009, + "learning_rate": 1.560927939266066e-06, + "loss": 0.5214, + "step": 12177 + }, + { + "epoch": 0.5025171247008335, + "grad_norm": 2.1547975667812542, + "learning_rate": 1.5607276281654315e-06, + "loss": 0.5057, + "step": 12178 + }, + { + "epoch": 0.5025583890401915, + "grad_norm": 2.7788538097056144, + "learning_rate": 1.56052731598005e-06, + "loss": 0.489, + "step": 12179 + }, + { + "epoch": 0.5025996533795494, + "grad_norm": 1.8423708466909143, + "learning_rate": 1.5603270027134989e-06, + "loss": 0.5247, + "step": 12180 + }, + { + "epoch": 0.5026409177189073, + "grad_norm": 3.0148712307192707, + "learning_rate": 1.560126688369357e-06, + "loss": 0.5187, + "step": 12181 + }, + { + "epoch": 0.5026821820582652, + "grad_norm": 3.3260765132729055, 
+ "learning_rate": 1.5599263729512017e-06, + "loss": 0.5665, + "step": 12182 + }, + { + "epoch": 0.5027234463976231, + "grad_norm": 2.3620603118479946, + "learning_rate": 1.5597260564626116e-06, + "loss": 0.5491, + "step": 12183 + }, + { + "epoch": 0.5027647107369811, + "grad_norm": 5.726185707830331, + "learning_rate": 1.5595257389071645e-06, + "loss": 0.5381, + "step": 12184 + }, + { + "epoch": 0.5028059750763391, + "grad_norm": 2.4008827626669893, + "learning_rate": 1.559325420288439e-06, + "loss": 0.5229, + "step": 12185 + }, + { + "epoch": 0.502847239415697, + "grad_norm": 2.1555450669008978, + "learning_rate": 1.559125100610013e-06, + "loss": 0.4924, + "step": 12186 + }, + { + "epoch": 0.5028885037550549, + "grad_norm": 3.0322024325608607, + "learning_rate": 1.5589247798754647e-06, + "loss": 0.5764, + "step": 12187 + }, + { + "epoch": 0.5029297680944128, + "grad_norm": 3.111045631889747, + "learning_rate": 1.5587244580883724e-06, + "loss": 0.5186, + "step": 12188 + }, + { + "epoch": 0.5029710324337707, + "grad_norm": 2.287591772586211, + "learning_rate": 1.558524135252315e-06, + "loss": 0.523, + "step": 12189 + }, + { + "epoch": 0.5030122967731286, + "grad_norm": 2.772508044166617, + "learning_rate": 1.5583238113708696e-06, + "loss": 0.5212, + "step": 12190 + }, + { + "epoch": 0.5030535611124866, + "grad_norm": 12.375893173877047, + "learning_rate": 1.558123486447615e-06, + "loss": 0.6097, + "step": 12191 + }, + { + "epoch": 0.5030948254518445, + "grad_norm": 3.2824189107382096, + "learning_rate": 1.5579231604861297e-06, + "loss": 0.551, + "step": 12192 + }, + { + "epoch": 0.5031360897912024, + "grad_norm": 2.5817881953751463, + "learning_rate": 1.5577228334899917e-06, + "loss": 0.5229, + "step": 12193 + }, + { + "epoch": 0.5031773541305604, + "grad_norm": 6.97870210185942, + "learning_rate": 1.5575225054627793e-06, + "loss": 0.5577, + "step": 12194 + }, + { + "epoch": 0.5032186184699183, + "grad_norm": 2.103069557769215, + "learning_rate": 1.5573221764080715e-06, + "loss": 0.5309, + "step": 12195 + }, + { + "epoch": 0.5032598828092762, + "grad_norm": 5.089687601981061, + "learning_rate": 1.5571218463294462e-06, + "loss": 0.5651, + "step": 12196 + }, + { + "epoch": 0.5033011471486342, + "grad_norm": 2.828604180718924, + "learning_rate": 1.5569215152304816e-06, + "loss": 0.5234, + "step": 12197 + }, + { + "epoch": 0.5033424114879921, + "grad_norm": 2.7002535173206734, + "learning_rate": 1.5567211831147564e-06, + "loss": 0.5095, + "step": 12198 + }, + { + "epoch": 0.50338367582735, + "grad_norm": 3.7616670560668317, + "learning_rate": 1.5565208499858493e-06, + "loss": 0.5731, + "step": 12199 + }, + { + "epoch": 0.5034249401667079, + "grad_norm": 9.8272276297638, + "learning_rate": 1.556320515847338e-06, + "loss": 0.561, + "step": 12200 + }, + { + "epoch": 0.5034662045060658, + "grad_norm": 3.069501985807152, + "learning_rate": 1.5561201807028014e-06, + "loss": 0.5644, + "step": 12201 + }, + { + "epoch": 0.5035074688454237, + "grad_norm": 2.8580024669737, + "learning_rate": 1.5559198445558181e-06, + "loss": 0.5484, + "step": 12202 + }, + { + "epoch": 0.5035487331847817, + "grad_norm": 3.1805500723661178, + "learning_rate": 1.5557195074099662e-06, + "loss": 0.5064, + "step": 12203 + }, + { + "epoch": 0.5035899975241397, + "grad_norm": 3.371590898909896, + "learning_rate": 1.5555191692688251e-06, + "loss": 0.5414, + "step": 12204 + }, + { + "epoch": 0.5036312618634976, + "grad_norm": 2.985766930204365, + "learning_rate": 1.5553188301359724e-06, + "loss": 0.5263, + "step": 12205 + }, + { + 
"epoch": 0.5036725262028555, + "grad_norm": 2.7083906655500383, + "learning_rate": 1.555118490014987e-06, + "loss": 0.5107, + "step": 12206 + }, + { + "epoch": 0.5037137905422134, + "grad_norm": 3.5956363402031135, + "learning_rate": 1.5549181489094473e-06, + "loss": 0.5487, + "step": 12207 + }, + { + "epoch": 0.5037550548815714, + "grad_norm": 11.039170813367772, + "learning_rate": 1.5547178068229324e-06, + "loss": 0.5484, + "step": 12208 + }, + { + "epoch": 0.5037963192209293, + "grad_norm": 3.3931491517483385, + "learning_rate": 1.5545174637590205e-06, + "loss": 0.5188, + "step": 12209 + }, + { + "epoch": 0.5038375835602872, + "grad_norm": 2.201107371383737, + "learning_rate": 1.5543171197212907e-06, + "loss": 0.5071, + "step": 12210 + }, + { + "epoch": 0.5038788478996451, + "grad_norm": 14.159958609953417, + "learning_rate": 1.5541167747133205e-06, + "loss": 0.5661, + "step": 12211 + }, + { + "epoch": 0.503920112239003, + "grad_norm": 7.113102674079393, + "learning_rate": 1.55391642873869e-06, + "loss": 0.5243, + "step": 12212 + }, + { + "epoch": 0.5039613765783609, + "grad_norm": 3.1592779881759774, + "learning_rate": 1.5537160818009766e-06, + "loss": 0.5167, + "step": 12213 + }, + { + "epoch": 0.504002640917719, + "grad_norm": 2.078290033177319, + "learning_rate": 1.55351573390376e-06, + "loss": 0.4896, + "step": 12214 + }, + { + "epoch": 0.5040439052570769, + "grad_norm": 3.1259573717888363, + "learning_rate": 1.5533153850506187e-06, + "loss": 0.4868, + "step": 12215 + }, + { + "epoch": 0.5040851695964348, + "grad_norm": 3.066276198673736, + "learning_rate": 1.5531150352451311e-06, + "loss": 0.4413, + "step": 12216 + }, + { + "epoch": 0.5041264339357927, + "grad_norm": 2.6936609893516037, + "learning_rate": 1.5529146844908758e-06, + "loss": 0.5492, + "step": 12217 + }, + { + "epoch": 0.5041676982751506, + "grad_norm": 6.336288304160936, + "learning_rate": 1.5527143327914319e-06, + "loss": 0.5712, + "step": 12218 + }, + { + "epoch": 0.5042089626145085, + "grad_norm": 2.55258480828307, + "learning_rate": 1.5525139801503783e-06, + "loss": 0.5913, + "step": 12219 + }, + { + "epoch": 0.5042502269538665, + "grad_norm": 2.369780390446849, + "learning_rate": 1.5523136265712937e-06, + "loss": 0.5224, + "step": 12220 + }, + { + "epoch": 0.5042914912932244, + "grad_norm": 3.3734422034459377, + "learning_rate": 1.5521132720577564e-06, + "loss": 0.5599, + "step": 12221 + }, + { + "epoch": 0.5043327556325823, + "grad_norm": 4.833171411701424, + "learning_rate": 1.5519129166133465e-06, + "loss": 0.5621, + "step": 12222 + }, + { + "epoch": 0.5043740199719402, + "grad_norm": 5.062223866412417, + "learning_rate": 1.5517125602416415e-06, + "loss": 0.5218, + "step": 12223 + }, + { + "epoch": 0.5044152843112982, + "grad_norm": 3.256645302705215, + "learning_rate": 1.5515122029462212e-06, + "loss": 0.4539, + "step": 12224 + }, + { + "epoch": 0.5044565486506561, + "grad_norm": 1.8653941963144454, + "learning_rate": 1.5513118447306634e-06, + "loss": 0.4506, + "step": 12225 + }, + { + "epoch": 0.5044978129900141, + "grad_norm": 5.400137218671867, + "learning_rate": 1.5511114855985483e-06, + "loss": 0.5608, + "step": 12226 + }, + { + "epoch": 0.504539077329372, + "grad_norm": 3.8956353707898717, + "learning_rate": 1.550911125553454e-06, + "loss": 0.5652, + "step": 12227 + }, + { + "epoch": 0.5045803416687299, + "grad_norm": 2.3420316255761455, + "learning_rate": 1.55071076459896e-06, + "loss": 0.5825, + "step": 12228 + }, + { + "epoch": 0.5046216060080878, + "grad_norm": 2.907196139000041, + "learning_rate": 
1.5505104027386443e-06, + "loss": 0.543, + "step": 12229 + }, + { + "epoch": 0.5046628703474457, + "grad_norm": 2.2669644622814276, + "learning_rate": 1.5503100399760866e-06, + "loss": 0.4383, + "step": 12230 + }, + { + "epoch": 0.5047041346868036, + "grad_norm": 2.3648435018649274, + "learning_rate": 1.5501096763148658e-06, + "loss": 0.5154, + "step": 12231 + }, + { + "epoch": 0.5047453990261616, + "grad_norm": 2.9058521793972827, + "learning_rate": 1.5499093117585608e-06, + "loss": 0.5489, + "step": 12232 + }, + { + "epoch": 0.5047866633655195, + "grad_norm": 14.399340764560785, + "learning_rate": 1.5497089463107504e-06, + "loss": 0.5372, + "step": 12233 + }, + { + "epoch": 0.5048279277048775, + "grad_norm": 2.0837451944851964, + "learning_rate": 1.5495085799750144e-06, + "loss": 0.5236, + "step": 12234 + }, + { + "epoch": 0.5048691920442354, + "grad_norm": 82.80617645091311, + "learning_rate": 1.5493082127549313e-06, + "loss": 0.508, + "step": 12235 + }, + { + "epoch": 0.5049104563835933, + "grad_norm": 3.662640945815723, + "learning_rate": 1.5491078446540797e-06, + "loss": 0.5574, + "step": 12236 + }, + { + "epoch": 0.5049517207229512, + "grad_norm": 8.458727730201668, + "learning_rate": 1.5489074756760395e-06, + "loss": 0.5586, + "step": 12237 + }, + { + "epoch": 0.5049929850623092, + "grad_norm": 5.632183344680834, + "learning_rate": 1.5487071058243891e-06, + "loss": 0.5281, + "step": 12238 + }, + { + "epoch": 0.5050342494016671, + "grad_norm": 4.448977243243087, + "learning_rate": 1.5485067351027083e-06, + "loss": 0.5649, + "step": 12239 + }, + { + "epoch": 0.505075513741025, + "grad_norm": 2.7150752856793474, + "learning_rate": 1.5483063635145757e-06, + "loss": 0.5513, + "step": 12240 + }, + { + "epoch": 0.5051167780803829, + "grad_norm": 3.5026546314586873, + "learning_rate": 1.5481059910635703e-06, + "loss": 0.5622, + "step": 12241 + }, + { + "epoch": 0.5051580424197408, + "grad_norm": 2.7832861594052423, + "learning_rate": 1.5479056177532723e-06, + "loss": 0.5362, + "step": 12242 + }, + { + "epoch": 0.5051993067590987, + "grad_norm": 5.515632432022737, + "learning_rate": 1.5477052435872599e-06, + "loss": 0.5412, + "step": 12243 + }, + { + "epoch": 0.5052405710984567, + "grad_norm": 9.921515802501947, + "learning_rate": 1.547504868569112e-06, + "loss": 0.5091, + "step": 12244 + }, + { + "epoch": 0.5052818354378147, + "grad_norm": 3.2258294994528875, + "learning_rate": 1.5473044927024086e-06, + "loss": 0.5203, + "step": 12245 + }, + { + "epoch": 0.5053230997771726, + "grad_norm": 3.0524788639718285, + "learning_rate": 1.5471041159907285e-06, + "loss": 0.5157, + "step": 12246 + }, + { + "epoch": 0.5053643641165305, + "grad_norm": 2.654321175739809, + "learning_rate": 1.5469037384376513e-06, + "loss": 0.5232, + "step": 12247 + }, + { + "epoch": 0.5054056284558884, + "grad_norm": 4.52489552914674, + "learning_rate": 1.546703360046756e-06, + "loss": 0.463, + "step": 12248 + }, + { + "epoch": 0.5054468927952463, + "grad_norm": 6.407434425787112, + "learning_rate": 1.5465029808216217e-06, + "loss": 0.5523, + "step": 12249 + }, + { + "epoch": 0.5054881571346043, + "grad_norm": 2.4639995618113013, + "learning_rate": 1.546302600765828e-06, + "loss": 0.4765, + "step": 12250 + }, + { + "epoch": 0.5055294214739622, + "grad_norm": 2.9801396800898607, + "learning_rate": 1.5461022198829537e-06, + "loss": 0.5461, + "step": 12251 + }, + { + "epoch": 0.5055706858133201, + "grad_norm": 5.233038586334599, + "learning_rate": 1.5459018381765786e-06, + "loss": 0.5775, + "step": 12252 + }, + { + "epoch": 
0.505611950152678, + "grad_norm": 3.478402414108858, + "learning_rate": 1.545701455650282e-06, + "loss": 0.5531, + "step": 12253 + }, + { + "epoch": 0.5056532144920359, + "grad_norm": 2.746387965531879, + "learning_rate": 1.5455010723076427e-06, + "loss": 0.5388, + "step": 12254 + }, + { + "epoch": 0.505694478831394, + "grad_norm": 6.714888921515282, + "learning_rate": 1.5453006881522413e-06, + "loss": 0.5521, + "step": 12255 + }, + { + "epoch": 0.5057357431707519, + "grad_norm": 3.013494778648832, + "learning_rate": 1.5451003031876555e-06, + "loss": 0.5642, + "step": 12256 + }, + { + "epoch": 0.5057770075101098, + "grad_norm": 2.8211404407172473, + "learning_rate": 1.5448999174174655e-06, + "loss": 0.5231, + "step": 12257 + }, + { + "epoch": 0.5058182718494677, + "grad_norm": 6.364772535309843, + "learning_rate": 1.5446995308452504e-06, + "loss": 0.5655, + "step": 12258 + }, + { + "epoch": 0.5058595361888256, + "grad_norm": 3.1251944717233386, + "learning_rate": 1.5444991434745906e-06, + "loss": 0.5322, + "step": 12259 + }, + { + "epoch": 0.5059008005281835, + "grad_norm": 3.84766556648686, + "learning_rate": 1.5442987553090641e-06, + "loss": 0.5184, + "step": 12260 + }, + { + "epoch": 0.5059420648675415, + "grad_norm": 2.428054369866879, + "learning_rate": 1.5440983663522517e-06, + "loss": 0.5426, + "step": 12261 + }, + { + "epoch": 0.5059833292068994, + "grad_norm": 4.634559043750815, + "learning_rate": 1.5438979766077315e-06, + "loss": 0.5094, + "step": 12262 + }, + { + "epoch": 0.5060245935462573, + "grad_norm": 2.6187054670501504, + "learning_rate": 1.5436975860790841e-06, + "loss": 0.5193, + "step": 12263 + }, + { + "epoch": 0.5060658578856152, + "grad_norm": 3.266883438558122, + "learning_rate": 1.543497194769888e-06, + "loss": 0.5092, + "step": 12264 + }, + { + "epoch": 0.5061071222249732, + "grad_norm": 5.626137938380663, + "learning_rate": 1.5432968026837235e-06, + "loss": 0.5367, + "step": 12265 + }, + { + "epoch": 0.5061483865643311, + "grad_norm": 2.3541493633414814, + "learning_rate": 1.5430964098241697e-06, + "loss": 0.5053, + "step": 12266 + }, + { + "epoch": 0.5061896509036891, + "grad_norm": 3.8581500942162443, + "learning_rate": 1.5428960161948062e-06, + "loss": 0.5515, + "step": 12267 + }, + { + "epoch": 0.506230915243047, + "grad_norm": 2.6558514064910814, + "learning_rate": 1.5426956217992128e-06, + "loss": 0.503, + "step": 12268 + }, + { + "epoch": 0.5062721795824049, + "grad_norm": 2.245804800514942, + "learning_rate": 1.5424952266409686e-06, + "loss": 0.5496, + "step": 12269 + }, + { + "epoch": 0.5063134439217628, + "grad_norm": 2.691262387163571, + "learning_rate": 1.5422948307236534e-06, + "loss": 0.5539, + "step": 12270 + }, + { + "epoch": 0.5063547082611207, + "grad_norm": 3.45381476669385, + "learning_rate": 1.542094434050847e-06, + "loss": 0.5548, + "step": 12271 + }, + { + "epoch": 0.5063959726004786, + "grad_norm": 2.8595524656910896, + "learning_rate": 1.541894036626128e-06, + "loss": 0.5206, + "step": 12272 + }, + { + "epoch": 0.5064372369398366, + "grad_norm": 4.253800492976594, + "learning_rate": 1.5416936384530773e-06, + "loss": 0.5753, + "step": 12273 + }, + { + "epoch": 0.5064785012791945, + "grad_norm": 10.247390530136894, + "learning_rate": 1.5414932395352735e-06, + "loss": 0.5042, + "step": 12274 + }, + { + "epoch": 0.5065197656185525, + "grad_norm": 2.4605246409384978, + "learning_rate": 1.5412928398762968e-06, + "loss": 0.5153, + "step": 12275 + }, + { + "epoch": 0.5065610299579104, + "grad_norm": 3.1055419197974143, + "learning_rate": 
1.5410924394797266e-06, + "loss": 0.5466, + "step": 12276 + }, + { + "epoch": 0.5066022942972683, + "grad_norm": 2.208263969743424, + "learning_rate": 1.540892038349143e-06, + "loss": 0.518, + "step": 12277 + }, + { + "epoch": 0.5066435586366262, + "grad_norm": 2.766064975158672, + "learning_rate": 1.5406916364881248e-06, + "loss": 0.5584, + "step": 12278 + }, + { + "epoch": 0.5066848229759842, + "grad_norm": 2.608344648299402, + "learning_rate": 1.5404912339002524e-06, + "loss": 0.5491, + "step": 12279 + }, + { + "epoch": 0.5067260873153421, + "grad_norm": 5.168389724901642, + "learning_rate": 1.5402908305891054e-06, + "loss": 0.5889, + "step": 12280 + }, + { + "epoch": 0.5067673516547, + "grad_norm": 2.727817059553148, + "learning_rate": 1.5400904265582632e-06, + "loss": 0.561, + "step": 12281 + }, + { + "epoch": 0.5068086159940579, + "grad_norm": 2.3915935800529513, + "learning_rate": 1.539890021811306e-06, + "loss": 0.5665, + "step": 12282 + }, + { + "epoch": 0.5068498803334158, + "grad_norm": 1.8781258169272257, + "learning_rate": 1.5396896163518127e-06, + "loss": 0.5088, + "step": 12283 + }, + { + "epoch": 0.5068911446727737, + "grad_norm": 13.735641319089778, + "learning_rate": 1.5394892101833639e-06, + "loss": 0.5572, + "step": 12284 + }, + { + "epoch": 0.5069324090121318, + "grad_norm": 3.230110763424823, + "learning_rate": 1.5392888033095388e-06, + "loss": 0.5485, + "step": 12285 + }, + { + "epoch": 0.5069736733514897, + "grad_norm": 2.835103048637036, + "learning_rate": 1.5390883957339178e-06, + "loss": 0.4988, + "step": 12286 + }, + { + "epoch": 0.5070149376908476, + "grad_norm": 2.441699355252351, + "learning_rate": 1.53888798746008e-06, + "loss": 0.494, + "step": 12287 + }, + { + "epoch": 0.5070562020302055, + "grad_norm": 15.472126123268025, + "learning_rate": 1.5386875784916057e-06, + "loss": 0.5627, + "step": 12288 + }, + { + "epoch": 0.5070974663695634, + "grad_norm": 3.27929087847951, + "learning_rate": 1.538487168832074e-06, + "loss": 0.5342, + "step": 12289 + }, + { + "epoch": 0.5071387307089213, + "grad_norm": 2.317060762066915, + "learning_rate": 1.5382867584850657e-06, + "loss": 0.608, + "step": 12290 + }, + { + "epoch": 0.5071799950482793, + "grad_norm": 2.5614132292274068, + "learning_rate": 1.5380863474541592e-06, + "loss": 0.5217, + "step": 12291 + }, + { + "epoch": 0.5072212593876372, + "grad_norm": 2.9274149791768207, + "learning_rate": 1.5378859357429362e-06, + "loss": 0.5377, + "step": 12292 + }, + { + "epoch": 0.5072625237269951, + "grad_norm": 2.7613326296015086, + "learning_rate": 1.5376855233549752e-06, + "loss": 0.5388, + "step": 12293 + }, + { + "epoch": 0.507303788066353, + "grad_norm": 4.218839033720533, + "learning_rate": 1.5374851102938568e-06, + "loss": 0.5719, + "step": 12294 + }, + { + "epoch": 0.507345052405711, + "grad_norm": 3.3159304472517994, + "learning_rate": 1.53728469656316e-06, + "loss": 0.5529, + "step": 12295 + }, + { + "epoch": 0.507386316745069, + "grad_norm": 1.9290086235531214, + "learning_rate": 1.537084282166466e-06, + "loss": 0.5383, + "step": 12296 + }, + { + "epoch": 0.5074275810844269, + "grad_norm": 3.36882301209385, + "learning_rate": 1.5368838671073533e-06, + "loss": 0.5857, + "step": 12297 + }, + { + "epoch": 0.5074688454237848, + "grad_norm": 3.44188860147958, + "learning_rate": 1.536683451389403e-06, + "loss": 0.5668, + "step": 12298 + }, + { + "epoch": 0.5075101097631427, + "grad_norm": 2.4200437978702403, + "learning_rate": 1.536483035016194e-06, + "loss": 0.4815, + "step": 12299 + }, + { + "epoch": 
0.5075513741025006, + "grad_norm": 2.5481960541309707, + "learning_rate": 1.536282617991307e-06, + "loss": 0.459, + "step": 12300 + }, + { + "epoch": 0.5075926384418585, + "grad_norm": 3.196483034884311, + "learning_rate": 1.5360822003183216e-06, + "loss": 0.4634, + "step": 12301 + }, + { + "epoch": 0.5076339027812165, + "grad_norm": 4.980064529809487, + "learning_rate": 1.5358817820008178e-06, + "loss": 0.506, + "step": 12302 + }, + { + "epoch": 0.5076751671205744, + "grad_norm": 2.737184033296605, + "learning_rate": 1.5356813630423756e-06, + "loss": 0.4949, + "step": 12303 + }, + { + "epoch": 0.5077164314599323, + "grad_norm": 2.559812058733186, + "learning_rate": 1.5354809434465752e-06, + "loss": 0.5085, + "step": 12304 + }, + { + "epoch": 0.5077576957992902, + "grad_norm": 2.583299793270431, + "learning_rate": 1.535280523216996e-06, + "loss": 0.547, + "step": 12305 + }, + { + "epoch": 0.5077989601386482, + "grad_norm": 3.9034151896216223, + "learning_rate": 1.5350801023572187e-06, + "loss": 0.5653, + "step": 12306 + }, + { + "epoch": 0.5078402244780061, + "grad_norm": 4.448321332898422, + "learning_rate": 1.534879680870823e-06, + "loss": 0.5872, + "step": 12307 + }, + { + "epoch": 0.5078814888173641, + "grad_norm": 3.8273383193218145, + "learning_rate": 1.5346792587613893e-06, + "loss": 0.5069, + "step": 12308 + }, + { + "epoch": 0.507922753156722, + "grad_norm": 4.282539901086792, + "learning_rate": 1.5344788360324964e-06, + "loss": 0.4997, + "step": 12309 + }, + { + "epoch": 0.5079640174960799, + "grad_norm": 16.28157401125814, + "learning_rate": 1.5342784126877259e-06, + "loss": 0.5041, + "step": 12310 + }, + { + "epoch": 0.5080052818354378, + "grad_norm": 2.0100626205674548, + "learning_rate": 1.5340779887306568e-06, + "loss": 0.5342, + "step": 12311 + }, + { + "epoch": 0.5080465461747957, + "grad_norm": 9.780881689661578, + "learning_rate": 1.5338775641648699e-06, + "loss": 0.5213, + "step": 12312 + }, + { + "epoch": 0.5080878105141536, + "grad_norm": 3.289515488460711, + "learning_rate": 1.5336771389939446e-06, + "loss": 0.4794, + "step": 12313 + }, + { + "epoch": 0.5081290748535116, + "grad_norm": 4.575401684690134, + "learning_rate": 1.533476713221462e-06, + "loss": 0.5965, + "step": 12314 + }, + { + "epoch": 0.5081703391928695, + "grad_norm": 3.2839869176846186, + "learning_rate": 1.533276286851001e-06, + "loss": 0.5198, + "step": 12315 + }, + { + "epoch": 0.5082116035322275, + "grad_norm": 3.293313647783786, + "learning_rate": 1.5330758598861426e-06, + "loss": 0.5541, + "step": 12316 + }, + { + "epoch": 0.5082528678715854, + "grad_norm": 4.043071478743755, + "learning_rate": 1.532875432330466e-06, + "loss": 0.5015, + "step": 12317 + }, + { + "epoch": 0.5082941322109433, + "grad_norm": 3.592367507692748, + "learning_rate": 1.5326750041875525e-06, + "loss": 0.4921, + "step": 12318 + }, + { + "epoch": 0.5083353965503012, + "grad_norm": 4.449179433334867, + "learning_rate": 1.5324745754609813e-06, + "loss": 0.5346, + "step": 12319 + }, + { + "epoch": 0.5083766608896592, + "grad_norm": 3.1927092304793185, + "learning_rate": 1.5322741461543334e-06, + "loss": 0.52, + "step": 12320 + }, + { + "epoch": 0.5084179252290171, + "grad_norm": 2.8953642473276124, + "learning_rate": 1.5320737162711885e-06, + "loss": 0.5576, + "step": 12321 + }, + { + "epoch": 0.508459189568375, + "grad_norm": 2.643751934099933, + "learning_rate": 1.5318732858151264e-06, + "loss": 0.5237, + "step": 12322 + }, + { + "epoch": 0.5085004539077329, + "grad_norm": 2.3849157156271854, + "learning_rate": 
1.5316728547897279e-06, + "loss": 0.5615, + "step": 12323 + }, + { + "epoch": 0.5085417182470908, + "grad_norm": 8.987280637658198, + "learning_rate": 1.5314724231985728e-06, + "loss": 0.5754, + "step": 12324 + }, + { + "epoch": 0.5085829825864487, + "grad_norm": 1.6486185209191928, + "learning_rate": 1.5312719910452417e-06, + "loss": 0.5254, + "step": 12325 + }, + { + "epoch": 0.5086242469258068, + "grad_norm": 3.1974011551968093, + "learning_rate": 1.5310715583333149e-06, + "loss": 0.4228, + "step": 12326 + }, + { + "epoch": 0.5086655112651647, + "grad_norm": 2.5613154388279646, + "learning_rate": 1.5308711250663718e-06, + "loss": 0.4926, + "step": 12327 + }, + { + "epoch": 0.5087067756045226, + "grad_norm": 5.536859628476915, + "learning_rate": 1.5306706912479934e-06, + "loss": 0.5728, + "step": 12328 + }, + { + "epoch": 0.5087480399438805, + "grad_norm": 2.1016987679327768, + "learning_rate": 1.5304702568817597e-06, + "loss": 0.4987, + "step": 12329 + }, + { + "epoch": 0.5087893042832384, + "grad_norm": 5.798402078677681, + "learning_rate": 1.5302698219712508e-06, + "loss": 0.5557, + "step": 12330 + }, + { + "epoch": 0.5088305686225963, + "grad_norm": 2.4562459944825563, + "learning_rate": 1.5300693865200474e-06, + "loss": 0.539, + "step": 12331 + }, + { + "epoch": 0.5088718329619543, + "grad_norm": 2.2744992760558205, + "learning_rate": 1.5298689505317294e-06, + "loss": 0.5641, + "step": 12332 + }, + { + "epoch": 0.5089130973013122, + "grad_norm": 5.996188509610763, + "learning_rate": 1.5296685140098773e-06, + "loss": 0.5317, + "step": 12333 + }, + { + "epoch": 0.5089543616406701, + "grad_norm": 3.3617705615984814, + "learning_rate": 1.5294680769580713e-06, + "loss": 0.5497, + "step": 12334 + }, + { + "epoch": 0.508995625980028, + "grad_norm": 2.6266054380224175, + "learning_rate": 1.5292676393798919e-06, + "loss": 0.511, + "step": 12335 + }, + { + "epoch": 0.509036890319386, + "grad_norm": 5.095309146958642, + "learning_rate": 1.5290672012789187e-06, + "loss": 0.5129, + "step": 12336 + }, + { + "epoch": 0.509078154658744, + "grad_norm": 5.866399750654944, + "learning_rate": 1.5288667626587333e-06, + "loss": 0.5244, + "step": 12337 + }, + { + "epoch": 0.5091194189981019, + "grad_norm": 3.141270581214141, + "learning_rate": 1.5286663235229145e-06, + "loss": 0.5345, + "step": 12338 + }, + { + "epoch": 0.5091606833374598, + "grad_norm": 19.1581443325854, + "learning_rate": 1.5284658838750442e-06, + "loss": 0.4742, + "step": 12339 + }, + { + "epoch": 0.5092019476768177, + "grad_norm": 3.203530008034422, + "learning_rate": 1.528265443718702e-06, + "loss": 0.5166, + "step": 12340 + }, + { + "epoch": 0.5092432120161756, + "grad_norm": 3.3509688733115555, + "learning_rate": 1.528065003057468e-06, + "loss": 0.5487, + "step": 12341 + }, + { + "epoch": 0.5092844763555335, + "grad_norm": 3.949211574148635, + "learning_rate": 1.5278645618949226e-06, + "loss": 0.5375, + "step": 12342 + }, + { + "epoch": 0.5093257406948914, + "grad_norm": 2.152792674200683, + "learning_rate": 1.5276641202346466e-06, + "loss": 0.4873, + "step": 12343 + }, + { + "epoch": 0.5093670050342494, + "grad_norm": 2.4649454849036005, + "learning_rate": 1.52746367808022e-06, + "loss": 0.5268, + "step": 12344 + }, + { + "epoch": 0.5094082693736073, + "grad_norm": 12.28373457489225, + "learning_rate": 1.5272632354352239e-06, + "loss": 0.5333, + "step": 12345 + }, + { + "epoch": 0.5094495337129653, + "grad_norm": 2.631390141513439, + "learning_rate": 1.5270627923032377e-06, + "loss": 0.5049, + "step": 12346 + }, + { + "epoch": 
0.5094907980523232, + "grad_norm": 4.606570945009297, + "learning_rate": 1.526862348687843e-06, + "loss": 0.5587, + "step": 12347 + }, + { + "epoch": 0.5095320623916811, + "grad_norm": 2.4272337951651615, + "learning_rate": 1.5266619045926188e-06, + "loss": 0.5407, + "step": 12348 + }, + { + "epoch": 0.5095733267310391, + "grad_norm": 5.340514057149251, + "learning_rate": 1.5264614600211469e-06, + "loss": 0.5303, + "step": 12349 + }, + { + "epoch": 0.509614591070397, + "grad_norm": 4.240244038763944, + "learning_rate": 1.5262610149770067e-06, + "loss": 0.4915, + "step": 12350 + }, + { + "epoch": 0.5096558554097549, + "grad_norm": 2.4186074178423267, + "learning_rate": 1.5260605694637794e-06, + "loss": 0.5807, + "step": 12351 + }, + { + "epoch": 0.5096971197491128, + "grad_norm": 2.8878203728830862, + "learning_rate": 1.5258601234850447e-06, + "loss": 0.5376, + "step": 12352 + }, + { + "epoch": 0.5097383840884707, + "grad_norm": 10.814496163857843, + "learning_rate": 1.5256596770443837e-06, + "loss": 0.5878, + "step": 12353 + }, + { + "epoch": 0.5097796484278286, + "grad_norm": 2.211542875619024, + "learning_rate": 1.5254592301453769e-06, + "loss": 0.5187, + "step": 12354 + }, + { + "epoch": 0.5098209127671866, + "grad_norm": 3.54711870740217, + "learning_rate": 1.5252587827916046e-06, + "loss": 0.6271, + "step": 12355 + }, + { + "epoch": 0.5098621771065446, + "grad_norm": 3.201637750415047, + "learning_rate": 1.525058334986647e-06, + "loss": 0.4809, + "step": 12356 + }, + { + "epoch": 0.5099034414459025, + "grad_norm": 2.5859410373551355, + "learning_rate": 1.5248578867340848e-06, + "loss": 0.529, + "step": 12357 + }, + { + "epoch": 0.5099447057852604, + "grad_norm": 2.571228138965759, + "learning_rate": 1.5246574380374985e-06, + "loss": 0.5444, + "step": 12358 + }, + { + "epoch": 0.5099859701246183, + "grad_norm": 5.360076085667652, + "learning_rate": 1.524456988900469e-06, + "loss": 0.5137, + "step": 12359 + }, + { + "epoch": 0.5100272344639762, + "grad_norm": 7.8686407245904695, + "learning_rate": 1.5242565393265767e-06, + "loss": 0.4904, + "step": 12360 + }, + { + "epoch": 0.5100684988033342, + "grad_norm": 4.734750791530382, + "learning_rate": 1.524056089319401e-06, + "loss": 0.5017, + "step": 12361 + }, + { + "epoch": 0.5101097631426921, + "grad_norm": 6.138996023378204, + "learning_rate": 1.5238556388825244e-06, + "loss": 0.5425, + "step": 12362 + }, + { + "epoch": 0.51015102748205, + "grad_norm": 2.570168183348394, + "learning_rate": 1.5236551880195254e-06, + "loss": 0.4751, + "step": 12363 + }, + { + "epoch": 0.5101922918214079, + "grad_norm": 2.061651963690623, + "learning_rate": 1.5234547367339863e-06, + "loss": 0.4778, + "step": 12364 + }, + { + "epoch": 0.5102335561607658, + "grad_norm": 1.9827924227311637, + "learning_rate": 1.5232542850294867e-06, + "loss": 0.5273, + "step": 12365 + }, + { + "epoch": 0.5102748205001237, + "grad_norm": 4.001260961336473, + "learning_rate": 1.5230538329096078e-06, + "loss": 0.5203, + "step": 12366 + }, + { + "epoch": 0.5103160848394818, + "grad_norm": 3.3064385756417933, + "learning_rate": 1.522853380377929e-06, + "loss": 0.4708, + "step": 12367 + }, + { + "epoch": 0.5103573491788397, + "grad_norm": 2.186240028925915, + "learning_rate": 1.5226529274380322e-06, + "loss": 0.5277, + "step": 12368 + }, + { + "epoch": 0.5103986135181976, + "grad_norm": 2.802345951237188, + "learning_rate": 1.5224524740934974e-06, + "loss": 0.5609, + "step": 12369 + }, + { + "epoch": 0.5104398778575555, + "grad_norm": 11.34041066569087, + "learning_rate": 
1.5222520203479048e-06, + "loss": 0.5405, + "step": 12370 + }, + { + "epoch": 0.5104811421969134, + "grad_norm": 3.323505935992393, + "learning_rate": 1.5220515662048357e-06, + "loss": 0.5794, + "step": 12371 + }, + { + "epoch": 0.5105224065362713, + "grad_norm": 80.27356429978173, + "learning_rate": 1.5218511116678706e-06, + "loss": 0.5424, + "step": 12372 + }, + { + "epoch": 0.5105636708756293, + "grad_norm": 2.122022616053584, + "learning_rate": 1.5216506567405903e-06, + "loss": 0.5093, + "step": 12373 + }, + { + "epoch": 0.5106049352149872, + "grad_norm": 10.682963062535364, + "learning_rate": 1.5214502014265747e-06, + "loss": 0.4954, + "step": 12374 + }, + { + "epoch": 0.5106461995543451, + "grad_norm": 2.127194544340083, + "learning_rate": 1.5212497457294047e-06, + "loss": 0.4806, + "step": 12375 + }, + { + "epoch": 0.510687463893703, + "grad_norm": 3.018837511642611, + "learning_rate": 1.5210492896526614e-06, + "loss": 0.5467, + "step": 12376 + }, + { + "epoch": 0.510728728233061, + "grad_norm": 5.024119034549426, + "learning_rate": 1.5208488331999249e-06, + "loss": 0.509, + "step": 12377 + }, + { + "epoch": 0.510769992572419, + "grad_norm": 2.799358954344728, + "learning_rate": 1.5206483763747765e-06, + "loss": 0.5926, + "step": 12378 + }, + { + "epoch": 0.5108112569117769, + "grad_norm": 3.8154381899977037, + "learning_rate": 1.5204479191807957e-06, + "loss": 0.5238, + "step": 12379 + }, + { + "epoch": 0.5108525212511348, + "grad_norm": 2.333102113510107, + "learning_rate": 1.520247461621565e-06, + "loss": 0.5853, + "step": 12380 + }, + { + "epoch": 0.5108937855904927, + "grad_norm": 2.9568737436250885, + "learning_rate": 1.5200470037006629e-06, + "loss": 0.4974, + "step": 12381 + }, + { + "epoch": 0.5109350499298506, + "grad_norm": 2.7094018595607237, + "learning_rate": 1.5198465454216719e-06, + "loss": 0.5324, + "step": 12382 + }, + { + "epoch": 0.5109763142692085, + "grad_norm": 9.433677665779976, + "learning_rate": 1.5196460867881713e-06, + "loss": 0.5364, + "step": 12383 + }, + { + "epoch": 0.5110175786085664, + "grad_norm": 3.1464259617572528, + "learning_rate": 1.5194456278037428e-06, + "loss": 0.5418, + "step": 12384 + }, + { + "epoch": 0.5110588429479244, + "grad_norm": 4.149971111621668, + "learning_rate": 1.5192451684719665e-06, + "loss": 0.5377, + "step": 12385 + }, + { + "epoch": 0.5111001072872823, + "grad_norm": 3.2625787170077523, + "learning_rate": 1.5190447087964244e-06, + "loss": 0.5905, + "step": 12386 + }, + { + "epoch": 0.5111413716266403, + "grad_norm": 2.610997823817387, + "learning_rate": 1.5188442487806948e-06, + "loss": 0.4902, + "step": 12387 + }, + { + "epoch": 0.5111826359659982, + "grad_norm": 3.431432480954819, + "learning_rate": 1.5186437884283606e-06, + "loss": 0.5302, + "step": 12388 + }, + { + "epoch": 0.5112239003053561, + "grad_norm": 4.490747381613221, + "learning_rate": 1.518443327743001e-06, + "loss": 0.5471, + "step": 12389 + }, + { + "epoch": 0.511265164644714, + "grad_norm": 16.097665700242313, + "learning_rate": 1.518242866728198e-06, + "loss": 0.5248, + "step": 12390 + }, + { + "epoch": 0.511306428984072, + "grad_norm": 5.0092065611775425, + "learning_rate": 1.5180424053875314e-06, + "loss": 0.6228, + "step": 12391 + }, + { + "epoch": 0.5113476933234299, + "grad_norm": 2.937025836312902, + "learning_rate": 1.5178419437245826e-06, + "loss": 0.4883, + "step": 12392 + }, + { + "epoch": 0.5113889576627878, + "grad_norm": 14.40798761215949, + "learning_rate": 1.5176414817429323e-06, + "loss": 0.4804, + "step": 12393 + }, + { + "epoch": 
0.5114302220021457, + "grad_norm": 2.6549047338670486, + "learning_rate": 1.517441019446161e-06, + "loss": 0.4726, + "step": 12394 + }, + { + "epoch": 0.5114714863415036, + "grad_norm": 3.6942311691480616, + "learning_rate": 1.5172405568378486e-06, + "loss": 0.5053, + "step": 12395 + }, + { + "epoch": 0.5115127506808615, + "grad_norm": 3.6099916194483734, + "learning_rate": 1.5170400939215773e-06, + "loss": 0.5062, + "step": 12396 + }, + { + "epoch": 0.5115540150202196, + "grad_norm": 3.343636521204847, + "learning_rate": 1.5168396307009272e-06, + "loss": 0.5176, + "step": 12397 + }, + { + "epoch": 0.5115952793595775, + "grad_norm": 2.7478286642342944, + "learning_rate": 1.5166391671794792e-06, + "loss": 0.5413, + "step": 12398 + }, + { + "epoch": 0.5116365436989354, + "grad_norm": 1.949423227789047, + "learning_rate": 1.5164387033608141e-06, + "loss": 0.5178, + "step": 12399 + }, + { + "epoch": 0.5116778080382933, + "grad_norm": 3.0421947516518326, + "learning_rate": 1.5162382392485126e-06, + "loss": 0.5307, + "step": 12400 + }, + { + "epoch": 0.5117190723776512, + "grad_norm": 4.259253517978624, + "learning_rate": 1.5160377748461556e-06, + "loss": 0.5126, + "step": 12401 + }, + { + "epoch": 0.5117603367170092, + "grad_norm": 3.736936973106643, + "learning_rate": 1.5158373101573236e-06, + "loss": 0.5206, + "step": 12402 + }, + { + "epoch": 0.5118016010563671, + "grad_norm": 6.339230698383836, + "learning_rate": 1.515636845185598e-06, + "loss": 0.4691, + "step": 12403 + }, + { + "epoch": 0.511842865395725, + "grad_norm": 2.501435955504189, + "learning_rate": 1.5154363799345588e-06, + "loss": 0.52, + "step": 12404 + }, + { + "epoch": 0.5118841297350829, + "grad_norm": 4.098679011878851, + "learning_rate": 1.5152359144077875e-06, + "loss": 0.5857, + "step": 12405 + }, + { + "epoch": 0.5119253940744408, + "grad_norm": 3.5445278794974655, + "learning_rate": 1.5150354486088646e-06, + "loss": 0.5271, + "step": 12406 + }, + { + "epoch": 0.5119666584137988, + "grad_norm": 3.764383411647313, + "learning_rate": 1.514834982541371e-06, + "loss": 0.5292, + "step": 12407 + }, + { + "epoch": 0.5120079227531568, + "grad_norm": 2.658811295268366, + "learning_rate": 1.5146345162088871e-06, + "loss": 0.5145, + "step": 12408 + }, + { + "epoch": 0.5120491870925147, + "grad_norm": 3.2810052085378802, + "learning_rate": 1.5144340496149946e-06, + "loss": 0.5777, + "step": 12409 + }, + { + "epoch": 0.5120904514318726, + "grad_norm": 7.17275572478122, + "learning_rate": 1.5142335827632738e-06, + "loss": 0.5409, + "step": 12410 + }, + { + "epoch": 0.5121317157712305, + "grad_norm": 2.5483574034723584, + "learning_rate": 1.5140331156573056e-06, + "loss": 0.5544, + "step": 12411 + }, + { + "epoch": 0.5121729801105884, + "grad_norm": 9.579437992998546, + "learning_rate": 1.5138326483006707e-06, + "loss": 0.541, + "step": 12412 + }, + { + "epoch": 0.5122142444499463, + "grad_norm": 2.7928230801848044, + "learning_rate": 1.5136321806969504e-06, + "loss": 0.539, + "step": 12413 + }, + { + "epoch": 0.5122555087893043, + "grad_norm": 3.8859391342608545, + "learning_rate": 1.513431712849725e-06, + "loss": 0.5268, + "step": 12414 + }, + { + "epoch": 0.5122967731286622, + "grad_norm": 3.4039817299198534, + "learning_rate": 1.5132312447625757e-06, + "loss": 0.568, + "step": 12415 + }, + { + "epoch": 0.5123380374680201, + "grad_norm": 2.9401978216553255, + "learning_rate": 1.5130307764390832e-06, + "loss": 0.588, + "step": 12416 + }, + { + "epoch": 0.5123793018073781, + "grad_norm": 3.1754305207575286, + "learning_rate": 
1.5128303078828284e-06, + "loss": 0.504, + "step": 12417 + }, + { + "epoch": 0.512420566146736, + "grad_norm": 6.298927568474257, + "learning_rate": 1.5126298390973923e-06, + "loss": 0.52, + "step": 12418 + }, + { + "epoch": 0.512461830486094, + "grad_norm": 2.180522467639784, + "learning_rate": 1.512429370086356e-06, + "loss": 0.5283, + "step": 12419 + }, + { + "epoch": 0.5125030948254519, + "grad_norm": 2.9306950591872307, + "learning_rate": 1.5122289008532996e-06, + "loss": 0.5449, + "step": 12420 + }, + { + "epoch": 0.5125443591648098, + "grad_norm": 3.7474208788303853, + "learning_rate": 1.5120284314018047e-06, + "loss": 0.5511, + "step": 12421 + }, + { + "epoch": 0.5125856235041677, + "grad_norm": 4.1228270543221734, + "learning_rate": 1.5118279617354514e-06, + "loss": 0.5397, + "step": 12422 + }, + { + "epoch": 0.5126268878435256, + "grad_norm": 6.869530447535477, + "learning_rate": 1.5116274918578218e-06, + "loss": 0.5066, + "step": 12423 + }, + { + "epoch": 0.5126681521828835, + "grad_norm": 5.160029562545851, + "learning_rate": 1.5114270217724954e-06, + "loss": 0.5144, + "step": 12424 + }, + { + "epoch": 0.5127094165222414, + "grad_norm": 5.514295420095933, + "learning_rate": 1.5112265514830543e-06, + "loss": 0.5341, + "step": 12425 + }, + { + "epoch": 0.5127506808615994, + "grad_norm": 4.6950958746132905, + "learning_rate": 1.511026080993079e-06, + "loss": 0.5125, + "step": 12426 + }, + { + "epoch": 0.5127919452009573, + "grad_norm": 2.668467594063583, + "learning_rate": 1.5108256103061503e-06, + "loss": 0.5122, + "step": 12427 + }, + { + "epoch": 0.5128332095403153, + "grad_norm": 2.40421592350514, + "learning_rate": 1.5106251394258486e-06, + "loss": 0.5292, + "step": 12428 + }, + { + "epoch": 0.5128744738796732, + "grad_norm": 11.28747444793713, + "learning_rate": 1.5104246683557556e-06, + "loss": 0.5101, + "step": 12429 + }, + { + "epoch": 0.5129157382190311, + "grad_norm": 2.706230879184513, + "learning_rate": 1.510224197099452e-06, + "loss": 0.5168, + "step": 12430 + }, + { + "epoch": 0.512957002558389, + "grad_norm": 2.444638162062009, + "learning_rate": 1.5100237256605188e-06, + "loss": 0.5491, + "step": 12431 + }, + { + "epoch": 0.512998266897747, + "grad_norm": 3.630792025186572, + "learning_rate": 1.509823254042537e-06, + "loss": 0.4611, + "step": 12432 + }, + { + "epoch": 0.5130395312371049, + "grad_norm": 35.43352564743052, + "learning_rate": 1.509622782249087e-06, + "loss": 0.515, + "step": 12433 + }, + { + "epoch": 0.5130807955764628, + "grad_norm": 13.258306139079545, + "learning_rate": 1.5094223102837498e-06, + "loss": 0.4909, + "step": 12434 + }, + { + "epoch": 0.5131220599158207, + "grad_norm": 2.3579698511972995, + "learning_rate": 1.5092218381501066e-06, + "loss": 0.515, + "step": 12435 + }, + { + "epoch": 0.5131633242551786, + "grad_norm": 2.4335615274478317, + "learning_rate": 1.5090213658517386e-06, + "loss": 0.5549, + "step": 12436 + }, + { + "epoch": 0.5132045885945365, + "grad_norm": 3.0569816511100645, + "learning_rate": 1.5088208933922264e-06, + "loss": 0.4832, + "step": 12437 + }, + { + "epoch": 0.5132458529338946, + "grad_norm": 5.401463688449228, + "learning_rate": 1.5086204207751506e-06, + "loss": 0.5285, + "step": 12438 + }, + { + "epoch": 0.5132871172732525, + "grad_norm": 4.071172683946452, + "learning_rate": 1.508419948004093e-06, + "loss": 0.5853, + "step": 12439 + }, + { + "epoch": 0.5133283816126104, + "grad_norm": 7.434106053210968, + "learning_rate": 1.5082194750826336e-06, + "loss": 0.5127, + "step": 12440 + }, + { + "epoch": 
0.5133696459519683, + "grad_norm": 3.8043024460956016, + "learning_rate": 1.5080190020143541e-06, + "loss": 0.4922, + "step": 12441 + }, + { + "epoch": 0.5134109102913262, + "grad_norm": 2.003381708328839, + "learning_rate": 1.5078185288028353e-06, + "loss": 0.5039, + "step": 12442 + }, + { + "epoch": 0.5134521746306842, + "grad_norm": 2.864173338556158, + "learning_rate": 1.507618055451658e-06, + "loss": 0.4987, + "step": 12443 + }, + { + "epoch": 0.5134934389700421, + "grad_norm": 2.0991801327449036, + "learning_rate": 1.507417581964403e-06, + "loss": 0.5329, + "step": 12444 + }, + { + "epoch": 0.5135347033094, + "grad_norm": 2.882632728007336, + "learning_rate": 1.5072171083446513e-06, + "loss": 0.4699, + "step": 12445 + }, + { + "epoch": 0.5135759676487579, + "grad_norm": 5.453155660656089, + "learning_rate": 1.5070166345959844e-06, + "loss": 0.5478, + "step": 12446 + }, + { + "epoch": 0.5136172319881158, + "grad_norm": 2.3683537186957393, + "learning_rate": 1.5068161607219822e-06, + "loss": 0.5072, + "step": 12447 + }, + { + "epoch": 0.5136584963274738, + "grad_norm": 3.043892589010248, + "learning_rate": 1.506615686726227e-06, + "loss": 0.4994, + "step": 12448 + }, + { + "epoch": 0.5136997606668318, + "grad_norm": 3.680120853933836, + "learning_rate": 1.5064152126122985e-06, + "loss": 0.5208, + "step": 12449 + }, + { + "epoch": 0.5137410250061897, + "grad_norm": 21.775308179789786, + "learning_rate": 1.5062147383837787e-06, + "loss": 0.555, + "step": 12450 + }, + { + "epoch": 0.5137822893455476, + "grad_norm": 3.9921011088298566, + "learning_rate": 1.5060142640442479e-06, + "loss": 0.4734, + "step": 12451 + }, + { + "epoch": 0.5138235536849055, + "grad_norm": 2.4109060755037475, + "learning_rate": 1.5058137895972878e-06, + "loss": 0.5366, + "step": 12452 + }, + { + "epoch": 0.5138648180242634, + "grad_norm": 2.418818615256686, + "learning_rate": 1.5056133150464781e-06, + "loss": 0.5056, + "step": 12453 + }, + { + "epoch": 0.5139060823636213, + "grad_norm": 7.914829085376681, + "learning_rate": 1.5054128403954011e-06, + "loss": 0.5185, + "step": 12454 + }, + { + "epoch": 0.5139473467029793, + "grad_norm": 2.122988473540184, + "learning_rate": 1.505212365647637e-06, + "loss": 0.5005, + "step": 12455 + }, + { + "epoch": 0.5139886110423372, + "grad_norm": 2.216841495609192, + "learning_rate": 1.505011890806767e-06, + "loss": 0.5242, + "step": 12456 + }, + { + "epoch": 0.5140298753816951, + "grad_norm": 3.339736164127851, + "learning_rate": 1.504811415876372e-06, + "loss": 0.4937, + "step": 12457 + }, + { + "epoch": 0.5140711397210531, + "grad_norm": 2.8208163728519304, + "learning_rate": 1.5046109408600339e-06, + "loss": 0.5181, + "step": 12458 + }, + { + "epoch": 0.514112404060411, + "grad_norm": 2.0309229556085695, + "learning_rate": 1.5044104657613317e-06, + "loss": 0.5324, + "step": 12459 + }, + { + "epoch": 0.514153668399769, + "grad_norm": 3.7651258965070573, + "learning_rate": 1.5042099905838481e-06, + "loss": 0.5185, + "step": 12460 + }, + { + "epoch": 0.5141949327391269, + "grad_norm": 3.036574721544063, + "learning_rate": 1.5040095153311632e-06, + "loss": 0.5709, + "step": 12461 + }, + { + "epoch": 0.5142361970784848, + "grad_norm": 3.786251250613555, + "learning_rate": 1.5038090400068591e-06, + "loss": 0.5507, + "step": 12462 + }, + { + "epoch": 0.5142774614178427, + "grad_norm": 2.5475880970493288, + "learning_rate": 1.5036085646145152e-06, + "loss": 0.5618, + "step": 12463 + }, + { + "epoch": 0.5143187257572006, + "grad_norm": 2.940761601590352, + "learning_rate": 
1.5034080891577136e-06, + "loss": 0.5009, + "step": 12464 + }, + { + "epoch": 0.5143599900965585, + "grad_norm": 3.215055895515694, + "learning_rate": 1.503207613640035e-06, + "loss": 0.5426, + "step": 12465 + }, + { + "epoch": 0.5144012544359164, + "grad_norm": 2.5481885407944294, + "learning_rate": 1.5030071380650607e-06, + "loss": 0.5777, + "step": 12466 + }, + { + "epoch": 0.5144425187752744, + "grad_norm": 2.6410879759251125, + "learning_rate": 1.502806662436371e-06, + "loss": 0.5623, + "step": 12467 + }, + { + "epoch": 0.5144837831146324, + "grad_norm": 11.862522182908451, + "learning_rate": 1.5026061867575473e-06, + "loss": 0.5505, + "step": 12468 + }, + { + "epoch": 0.5145250474539903, + "grad_norm": 3.5067254922983415, + "learning_rate": 1.5024057110321706e-06, + "loss": 0.5353, + "step": 12469 + }, + { + "epoch": 0.5145663117933482, + "grad_norm": 4.865434225991863, + "learning_rate": 1.5022052352638222e-06, + "loss": 0.5368, + "step": 12470 + }, + { + "epoch": 0.5146075761327061, + "grad_norm": 2.933130589699865, + "learning_rate": 1.5020047594560823e-06, + "loss": 0.5475, + "step": 12471 + }, + { + "epoch": 0.514648840472064, + "grad_norm": 2.720458909125836, + "learning_rate": 1.5018042836125333e-06, + "loss": 0.5493, + "step": 12472 + }, + { + "epoch": 0.514690104811422, + "grad_norm": 2.6000924610949614, + "learning_rate": 1.501603807736754e-06, + "loss": 0.5304, + "step": 12473 + }, + { + "epoch": 0.5147313691507799, + "grad_norm": 3.0254435937273936, + "learning_rate": 1.5014033318323277e-06, + "loss": 0.5039, + "step": 12474 + }, + { + "epoch": 0.5147726334901378, + "grad_norm": 2.3328226399577323, + "learning_rate": 1.5012028559028338e-06, + "loss": 0.5927, + "step": 12475 + }, + { + "epoch": 0.5148138978294957, + "grad_norm": 12.81589933993018, + "learning_rate": 1.5010023799518543e-06, + "loss": 0.5699, + "step": 12476 + }, + { + "epoch": 0.5148551621688536, + "grad_norm": 2.932106547385739, + "learning_rate": 1.5008019039829691e-06, + "loss": 0.5076, + "step": 12477 + }, + { + "epoch": 0.5148964265082117, + "grad_norm": 6.9209311193236775, + "learning_rate": 1.5006014279997605e-06, + "loss": 0.5097, + "step": 12478 + }, + { + "epoch": 0.5149376908475696, + "grad_norm": 2.6590205939100224, + "learning_rate": 1.5004009520058089e-06, + "loss": 0.5649, + "step": 12479 + }, + { + "epoch": 0.5149789551869275, + "grad_norm": 6.361559421961135, + "learning_rate": 1.5002004760046948e-06, + "loss": 0.4987, + "step": 12480 + }, + { + "epoch": 0.5150202195262854, + "grad_norm": 3.2396884000658286, + "learning_rate": 1.5e-06, + "loss": 0.5475, + "step": 12481 + }, + { + "epoch": 0.5150614838656433, + "grad_norm": 2.5196458540976825, + "learning_rate": 1.4997995239953053e-06, + "loss": 0.5016, + "step": 12482 + }, + { + "epoch": 0.5151027482050012, + "grad_norm": 5.7121340952759505, + "learning_rate": 1.4995990479941914e-06, + "loss": 0.5689, + "step": 12483 + }, + { + "epoch": 0.5151440125443592, + "grad_norm": 3.171568766578117, + "learning_rate": 1.4993985720002398e-06, + "loss": 0.4963, + "step": 12484 + }, + { + "epoch": 0.5151852768837171, + "grad_norm": 5.531884819514349, + "learning_rate": 1.499198096017031e-06, + "loss": 0.5547, + "step": 12485 + }, + { + "epoch": 0.515226541223075, + "grad_norm": 2.2822782332564793, + "learning_rate": 1.4989976200481462e-06, + "loss": 0.5448, + "step": 12486 + }, + { + "epoch": 0.5152678055624329, + "grad_norm": 2.8877262972540687, + "learning_rate": 1.4987971440971663e-06, + "loss": 0.5366, + "step": 12487 + }, + { + "epoch": 
0.5153090699017908, + "grad_norm": 4.479359792607225, + "learning_rate": 1.498596668167673e-06, + "loss": 0.5263, + "step": 12488 + }, + { + "epoch": 0.5153503342411488, + "grad_norm": 5.223091640391125, + "learning_rate": 1.4983961922632465e-06, + "loss": 0.5433, + "step": 12489 + }, + { + "epoch": 0.5153915985805068, + "grad_norm": 4.347457565955976, + "learning_rate": 1.4981957163874676e-06, + "loss": 0.5079, + "step": 12490 + }, + { + "epoch": 0.5154328629198647, + "grad_norm": 9.735338080773449, + "learning_rate": 1.497995240543918e-06, + "loss": 0.5027, + "step": 12491 + }, + { + "epoch": 0.5154741272592226, + "grad_norm": 6.472292226502369, + "learning_rate": 1.497794764736178e-06, + "loss": 0.5654, + "step": 12492 + }, + { + "epoch": 0.5155153915985805, + "grad_norm": 3.671177094423298, + "learning_rate": 1.4975942889678297e-06, + "loss": 0.5026, + "step": 12493 + }, + { + "epoch": 0.5155566559379384, + "grad_norm": 5.321859812796636, + "learning_rate": 1.4973938132424528e-06, + "loss": 0.599, + "step": 12494 + }, + { + "epoch": 0.5155979202772963, + "grad_norm": 3.170488570277525, + "learning_rate": 1.4971933375636292e-06, + "loss": 0.503, + "step": 12495 + }, + { + "epoch": 0.5156391846166543, + "grad_norm": 17.162786908731892, + "learning_rate": 1.4969928619349396e-06, + "loss": 0.5593, + "step": 12496 + }, + { + "epoch": 0.5156804489560122, + "grad_norm": 3.5503744073745964, + "learning_rate": 1.496792386359965e-06, + "loss": 0.5414, + "step": 12497 + }, + { + "epoch": 0.5157217132953701, + "grad_norm": 2.2875647810984487, + "learning_rate": 1.4965919108422863e-06, + "loss": 0.503, + "step": 12498 + }, + { + "epoch": 0.5157629776347281, + "grad_norm": 4.573491404750687, + "learning_rate": 1.4963914353854848e-06, + "loss": 0.5031, + "step": 12499 + }, + { + "epoch": 0.515804241974086, + "grad_norm": 5.020782436405233, + "learning_rate": 1.496190959993141e-06, + "loss": 0.5149, + "step": 12500 + }, + { + "epoch": 0.515845506313444, + "grad_norm": 2.5476560463004283, + "learning_rate": 1.495990484668837e-06, + "loss": 0.5133, + "step": 12501 + }, + { + "epoch": 0.5158867706528019, + "grad_norm": 12.416634699972901, + "learning_rate": 1.4957900094161524e-06, + "loss": 0.4986, + "step": 12502 + }, + { + "epoch": 0.5159280349921598, + "grad_norm": 2.5353894462582045, + "learning_rate": 1.4955895342386688e-06, + "loss": 0.5218, + "step": 12503 + }, + { + "epoch": 0.5159692993315177, + "grad_norm": 3.7522545072229843, + "learning_rate": 1.495389059139967e-06, + "loss": 0.514, + "step": 12504 + }, + { + "epoch": 0.5160105636708756, + "grad_norm": 5.876680721326719, + "learning_rate": 1.4951885841236283e-06, + "loss": 0.4546, + "step": 12505 + }, + { + "epoch": 0.5160518280102335, + "grad_norm": 2.376445089206321, + "learning_rate": 1.4949881091932332e-06, + "loss": 0.4305, + "step": 12506 + }, + { + "epoch": 0.5160930923495914, + "grad_norm": 3.219486276593857, + "learning_rate": 1.4947876343523633e-06, + "loss": 0.5361, + "step": 12507 + }, + { + "epoch": 0.5161343566889494, + "grad_norm": 3.696095265277161, + "learning_rate": 1.494587159604599e-06, + "loss": 0.565, + "step": 12508 + }, + { + "epoch": 0.5161756210283074, + "grad_norm": 8.454115647641554, + "learning_rate": 1.494386684953522e-06, + "loss": 0.5249, + "step": 12509 + }, + { + "epoch": 0.5162168853676653, + "grad_norm": 7.185090276345766, + "learning_rate": 1.4941862104027125e-06, + "loss": 0.4808, + "step": 12510 + }, + { + "epoch": 0.5162581497070232, + "grad_norm": 3.3085984438961913, + "learning_rate": 
1.4939857359557522e-06, + "loss": 0.5597, + "step": 12511 + }, + { + "epoch": 0.5162994140463811, + "grad_norm": 2.6976336148607234, + "learning_rate": 1.4937852616162211e-06, + "loss": 0.5172, + "step": 12512 + }, + { + "epoch": 0.516340678385739, + "grad_norm": 3.4730199899713305, + "learning_rate": 1.4935847873877013e-06, + "loss": 0.526, + "step": 12513 + }, + { + "epoch": 0.516381942725097, + "grad_norm": 5.448080673940787, + "learning_rate": 1.4933843132737737e-06, + "loss": 0.5396, + "step": 12514 + }, + { + "epoch": 0.5164232070644549, + "grad_norm": 10.553870147297445, + "learning_rate": 1.493183839278018e-06, + "loss": 0.5687, + "step": 12515 + }, + { + "epoch": 0.5164644714038128, + "grad_norm": 6.090272326404066, + "learning_rate": 1.4929833654040163e-06, + "loss": 0.4728, + "step": 12516 + }, + { + "epoch": 0.5165057357431707, + "grad_norm": 3.369405648456575, + "learning_rate": 1.4927828916553488e-06, + "loss": 0.4774, + "step": 12517 + }, + { + "epoch": 0.5165470000825286, + "grad_norm": 3.395414576508878, + "learning_rate": 1.4925824180355975e-06, + "loss": 0.5567, + "step": 12518 + }, + { + "epoch": 0.5165882644218867, + "grad_norm": 6.603094820439789, + "learning_rate": 1.4923819445483423e-06, + "loss": 0.5562, + "step": 12519 + }, + { + "epoch": 0.5166295287612446, + "grad_norm": 2.248595671645581, + "learning_rate": 1.492181471197165e-06, + "loss": 0.4986, + "step": 12520 + }, + { + "epoch": 0.5166707931006025, + "grad_norm": 2.3864816206277752, + "learning_rate": 1.4919809979856461e-06, + "loss": 0.5017, + "step": 12521 + }, + { + "epoch": 0.5167120574399604, + "grad_norm": 5.993447789043735, + "learning_rate": 1.4917805249173665e-06, + "loss": 0.4997, + "step": 12522 + }, + { + "epoch": 0.5167533217793183, + "grad_norm": 2.689263264192032, + "learning_rate": 1.4915800519959075e-06, + "loss": 0.5352, + "step": 12523 + }, + { + "epoch": 0.5167945861186762, + "grad_norm": 2.496761850009541, + "learning_rate": 1.4913795792248495e-06, + "loss": 0.5086, + "step": 12524 + }, + { + "epoch": 0.5168358504580342, + "grad_norm": 2.9161483792972254, + "learning_rate": 1.491179106607774e-06, + "loss": 0.5097, + "step": 12525 + }, + { + "epoch": 0.5168771147973921, + "grad_norm": 3.085990819203179, + "learning_rate": 1.4909786341482615e-06, + "loss": 0.5499, + "step": 12526 + }, + { + "epoch": 0.51691837913675, + "grad_norm": 2.5858603791966757, + "learning_rate": 1.4907781618498934e-06, + "loss": 0.4975, + "step": 12527 + }, + { + "epoch": 0.5169596434761079, + "grad_norm": 2.1458123374272406, + "learning_rate": 1.4905776897162509e-06, + "loss": 0.4793, + "step": 12528 + }, + { + "epoch": 0.5170009078154659, + "grad_norm": 10.304957062842389, + "learning_rate": 1.4903772177509138e-06, + "loss": 0.537, + "step": 12529 + }, + { + "epoch": 0.5170421721548238, + "grad_norm": 2.3202684432986436, + "learning_rate": 1.490176745957464e-06, + "loss": 0.5257, + "step": 12530 + }, + { + "epoch": 0.5170834364941818, + "grad_norm": 2.359678803568983, + "learning_rate": 1.4899762743394813e-06, + "loss": 0.4733, + "step": 12531 + }, + { + "epoch": 0.5171247008335397, + "grad_norm": 2.6900854734154467, + "learning_rate": 1.4897758029005483e-06, + "loss": 0.5472, + "step": 12532 + }, + { + "epoch": 0.5171659651728976, + "grad_norm": 2.6831455147402363, + "learning_rate": 1.4895753316442445e-06, + "loss": 0.5198, + "step": 12533 + }, + { + "epoch": 0.5172072295122555, + "grad_norm": 2.1779619356598077, + "learning_rate": 1.4893748605741517e-06, + "loss": 0.5054, + "step": 12534 + }, + { + "epoch": 
0.5172484938516134, + "grad_norm": 3.223116913910027, + "learning_rate": 1.48917438969385e-06, + "loss": 0.5962, + "step": 12535 + }, + { + "epoch": 0.5172897581909713, + "grad_norm": 6.1883014500067945, + "learning_rate": 1.4889739190069213e-06, + "loss": 0.4717, + "step": 12536 + }, + { + "epoch": 0.5173310225303293, + "grad_norm": 2.740066080116232, + "learning_rate": 1.4887734485169456e-06, + "loss": 0.5151, + "step": 12537 + }, + { + "epoch": 0.5173722868696872, + "grad_norm": 3.211999017063861, + "learning_rate": 1.4885729782275046e-06, + "loss": 0.5106, + "step": 12538 + }, + { + "epoch": 0.5174135512090452, + "grad_norm": 2.422464171429378, + "learning_rate": 1.4883725081421783e-06, + "loss": 0.4756, + "step": 12539 + }, + { + "epoch": 0.5174548155484031, + "grad_norm": 8.587863354271692, + "learning_rate": 1.4881720382645485e-06, + "loss": 0.549, + "step": 12540 + }, + { + "epoch": 0.517496079887761, + "grad_norm": 2.771267205305299, + "learning_rate": 1.487971568598196e-06, + "loss": 0.4964, + "step": 12541 + }, + { + "epoch": 0.517537344227119, + "grad_norm": 2.520879365372635, + "learning_rate": 1.487771099146701e-06, + "loss": 0.4663, + "step": 12542 + }, + { + "epoch": 0.5175786085664769, + "grad_norm": 3.062739055004936, + "learning_rate": 1.4875706299136447e-06, + "loss": 0.5083, + "step": 12543 + }, + { + "epoch": 0.5176198729058348, + "grad_norm": 2.6638148896964893, + "learning_rate": 1.4873701609026082e-06, + "loss": 0.5407, + "step": 12544 + }, + { + "epoch": 0.5176611372451927, + "grad_norm": 3.963717425217781, + "learning_rate": 1.4871696921171716e-06, + "loss": 0.5666, + "step": 12545 + }, + { + "epoch": 0.5177024015845506, + "grad_norm": 2.532870426964057, + "learning_rate": 1.486969223560917e-06, + "loss": 0.4754, + "step": 12546 + }, + { + "epoch": 0.5177436659239085, + "grad_norm": 2.15316038020793, + "learning_rate": 1.4867687552374243e-06, + "loss": 0.5172, + "step": 12547 + }, + { + "epoch": 0.5177849302632664, + "grad_norm": 7.015775200276749, + "learning_rate": 1.4865682871502752e-06, + "loss": 0.5169, + "step": 12548 + }, + { + "epoch": 0.5178261946026244, + "grad_norm": 2.334228152563302, + "learning_rate": 1.4863678193030497e-06, + "loss": 0.495, + "step": 12549 + }, + { + "epoch": 0.5178674589419824, + "grad_norm": 2.3627423919971875, + "learning_rate": 1.4861673516993294e-06, + "loss": 0.4876, + "step": 12550 + }, + { + "epoch": 0.5179087232813403, + "grad_norm": 15.847955363173957, + "learning_rate": 1.4859668843426943e-06, + "loss": 0.5353, + "step": 12551 + }, + { + "epoch": 0.5179499876206982, + "grad_norm": 2.0356411534620373, + "learning_rate": 1.485766417236726e-06, + "loss": 0.4808, + "step": 12552 + }, + { + "epoch": 0.5179912519600561, + "grad_norm": 2.105528391104287, + "learning_rate": 1.485565950385005e-06, + "loss": 0.5435, + "step": 12553 + }, + { + "epoch": 0.518032516299414, + "grad_norm": 2.380978898086306, + "learning_rate": 1.485365483791113e-06, + "loss": 0.5719, + "step": 12554 + }, + { + "epoch": 0.518073780638772, + "grad_norm": 4.215073016903963, + "learning_rate": 1.4851650174586296e-06, + "loss": 0.4877, + "step": 12555 + }, + { + "epoch": 0.5181150449781299, + "grad_norm": 3.20070170583244, + "learning_rate": 1.4849645513911357e-06, + "loss": 0.5466, + "step": 12556 + }, + { + "epoch": 0.5181563093174878, + "grad_norm": 3.50438621922081, + "learning_rate": 1.484764085592213e-06, + "loss": 0.5926, + "step": 12557 + }, + { + "epoch": 0.5181975736568457, + "grad_norm": 8.677061678041532, + "learning_rate": 
1.4845636200654415e-06, + "loss": 0.5186, + "step": 12558 + }, + { + "epoch": 0.5182388379962036, + "grad_norm": 7.087850571877364, + "learning_rate": 1.4843631548144024e-06, + "loss": 0.4995, + "step": 12559 + }, + { + "epoch": 0.5182801023355617, + "grad_norm": 3.365941974786058, + "learning_rate": 1.4841626898426769e-06, + "loss": 0.5265, + "step": 12560 + }, + { + "epoch": 0.5183213666749196, + "grad_norm": 2.287978935987124, + "learning_rate": 1.4839622251538447e-06, + "loss": 0.5016, + "step": 12561 + }, + { + "epoch": 0.5183626310142775, + "grad_norm": 10.896463943183116, + "learning_rate": 1.4837617607514877e-06, + "loss": 0.5134, + "step": 12562 + }, + { + "epoch": 0.5184038953536354, + "grad_norm": 2.641492416717296, + "learning_rate": 1.483561296639186e-06, + "loss": 0.5174, + "step": 12563 + }, + { + "epoch": 0.5184451596929933, + "grad_norm": 3.3556359712989114, + "learning_rate": 1.4833608328205209e-06, + "loss": 0.5362, + "step": 12564 + }, + { + "epoch": 0.5184864240323512, + "grad_norm": 5.895318310960605, + "learning_rate": 1.4831603692990727e-06, + "loss": 0.5719, + "step": 12565 + }, + { + "epoch": 0.5185276883717092, + "grad_norm": 4.502377361333668, + "learning_rate": 1.4829599060784228e-06, + "loss": 0.5297, + "step": 12566 + }, + { + "epoch": 0.5185689527110671, + "grad_norm": 1.774606791577978, + "learning_rate": 1.482759443162152e-06, + "loss": 0.5503, + "step": 12567 + }, + { + "epoch": 0.518610217050425, + "grad_norm": 2.172348170082371, + "learning_rate": 1.48255898055384e-06, + "loss": 0.4733, + "step": 12568 + }, + { + "epoch": 0.5186514813897829, + "grad_norm": 2.888022215503927, + "learning_rate": 1.4823585182570684e-06, + "loss": 0.5022, + "step": 12569 + }, + { + "epoch": 0.5186927457291409, + "grad_norm": 2.674550941764927, + "learning_rate": 1.4821580562754175e-06, + "loss": 0.4832, + "step": 12570 + }, + { + "epoch": 0.5187340100684988, + "grad_norm": 3.062251036528392, + "learning_rate": 1.4819575946124688e-06, + "loss": 0.51, + "step": 12571 + }, + { + "epoch": 0.5187752744078568, + "grad_norm": 3.554835079096851, + "learning_rate": 1.481757133271802e-06, + "loss": 0.5547, + "step": 12572 + }, + { + "epoch": 0.5188165387472147, + "grad_norm": 17.532801663781868, + "learning_rate": 1.4815566722569992e-06, + "loss": 0.5111, + "step": 12573 + }, + { + "epoch": 0.5188578030865726, + "grad_norm": 3.484312758805298, + "learning_rate": 1.4813562115716397e-06, + "loss": 0.5605, + "step": 12574 + }, + { + "epoch": 0.5188990674259305, + "grad_norm": 2.100894202740227, + "learning_rate": 1.4811557512193053e-06, + "loss": 0.5378, + "step": 12575 + }, + { + "epoch": 0.5189403317652884, + "grad_norm": 3.6306716386551314, + "learning_rate": 1.4809552912035761e-06, + "loss": 0.4855, + "step": 12576 + }, + { + "epoch": 0.5189815961046463, + "grad_norm": 10.082997699905395, + "learning_rate": 1.4807548315280332e-06, + "loss": 0.5107, + "step": 12577 + }, + { + "epoch": 0.5190228604440043, + "grad_norm": 3.609865864733043, + "learning_rate": 1.4805543721962568e-06, + "loss": 0.5252, + "step": 12578 + }, + { + "epoch": 0.5190641247833622, + "grad_norm": 3.1357854722759186, + "learning_rate": 1.4803539132118286e-06, + "loss": 0.4979, + "step": 12579 + }, + { + "epoch": 0.5191053891227202, + "grad_norm": 5.896003267163727, + "learning_rate": 1.4801534545783289e-06, + "loss": 0.5742, + "step": 12580 + }, + { + "epoch": 0.5191466534620781, + "grad_norm": 2.479555033907037, + "learning_rate": 1.4799529962993376e-06, + "loss": 0.5128, + "step": 12581 + }, + { + "epoch": 
0.519187917801436, + "grad_norm": 2.3497465625135696, + "learning_rate": 1.4797525383784357e-06, + "loss": 0.5318, + "step": 12582 + }, + { + "epoch": 0.5192291821407939, + "grad_norm": 2.2272750779611106, + "learning_rate": 1.4795520808192043e-06, + "loss": 0.4893, + "step": 12583 + }, + { + "epoch": 0.5192704464801519, + "grad_norm": 2.4370573940093068, + "learning_rate": 1.479351623625224e-06, + "loss": 0.5772, + "step": 12584 + }, + { + "epoch": 0.5193117108195098, + "grad_norm": 2.9366334044242457, + "learning_rate": 1.4791511668000752e-06, + "loss": 0.5794, + "step": 12585 + }, + { + "epoch": 0.5193529751588677, + "grad_norm": 2.1005677783468735, + "learning_rate": 1.4789507103473386e-06, + "loss": 0.5615, + "step": 12586 + }, + { + "epoch": 0.5193942394982256, + "grad_norm": 3.1607135877414785, + "learning_rate": 1.4787502542705954e-06, + "loss": 0.5399, + "step": 12587 + }, + { + "epoch": 0.5194355038375835, + "grad_norm": 2.054925573864336, + "learning_rate": 1.4785497985734253e-06, + "loss": 0.4485, + "step": 12588 + }, + { + "epoch": 0.5194767681769414, + "grad_norm": 4.097787761491127, + "learning_rate": 1.47834934325941e-06, + "loss": 0.5523, + "step": 12589 + }, + { + "epoch": 0.5195180325162995, + "grad_norm": 2.2826442154780655, + "learning_rate": 1.478148888332129e-06, + "loss": 0.525, + "step": 12590 + }, + { + "epoch": 0.5195592968556574, + "grad_norm": 6.200803282064195, + "learning_rate": 1.4779484337951641e-06, + "loss": 0.5141, + "step": 12591 + }, + { + "epoch": 0.5196005611950153, + "grad_norm": 2.4357810666997826, + "learning_rate": 1.4777479796520948e-06, + "loss": 0.6059, + "step": 12592 + }, + { + "epoch": 0.5196418255343732, + "grad_norm": 2.9126224241323224, + "learning_rate": 1.477547525906503e-06, + "loss": 0.5664, + "step": 12593 + }, + { + "epoch": 0.5196830898737311, + "grad_norm": 3.984107382490119, + "learning_rate": 1.4773470725619681e-06, + "loss": 0.5031, + "step": 12594 + }, + { + "epoch": 0.519724354213089, + "grad_norm": 3.921272082015783, + "learning_rate": 1.4771466196220712e-06, + "loss": 0.5428, + "step": 12595 + }, + { + "epoch": 0.519765618552447, + "grad_norm": 3.570082926672456, + "learning_rate": 1.4769461670903929e-06, + "loss": 0.5104, + "step": 12596 + }, + { + "epoch": 0.5198068828918049, + "grad_norm": 2.348241111780027, + "learning_rate": 1.4767457149705134e-06, + "loss": 0.5314, + "step": 12597 + }, + { + "epoch": 0.5198481472311628, + "grad_norm": 4.251185317932634, + "learning_rate": 1.4765452632660138e-06, + "loss": 0.5198, + "step": 12598 + }, + { + "epoch": 0.5198894115705207, + "grad_norm": 2.5167925436521537, + "learning_rate": 1.476344811980475e-06, + "loss": 0.5795, + "step": 12599 + }, + { + "epoch": 0.5199306759098787, + "grad_norm": 2.3031304767703875, + "learning_rate": 1.476144361117476e-06, + "loss": 0.5416, + "step": 12600 + }, + { + "epoch": 0.5199719402492367, + "grad_norm": 3.4134705341360085, + "learning_rate": 1.4759439106805992e-06, + "loss": 0.5261, + "step": 12601 + }, + { + "epoch": 0.5200132045885946, + "grad_norm": 3.506750952412057, + "learning_rate": 1.4757434606734238e-06, + "loss": 0.5149, + "step": 12602 + }, + { + "epoch": 0.5200544689279525, + "grad_norm": 2.179098136833189, + "learning_rate": 1.4755430110995312e-06, + "loss": 0.4587, + "step": 12603 + }, + { + "epoch": 0.5200957332673104, + "grad_norm": 7.9528456541424974, + "learning_rate": 1.4753425619625013e-06, + "loss": 0.5252, + "step": 12604 + }, + { + "epoch": 0.5201369976066683, + "grad_norm": 12.79504030826102, + "learning_rate": 
1.4751421132659152e-06, + "loss": 0.5533, + "step": 12605 + }, + { + "epoch": 0.5201782619460262, + "grad_norm": 2.649516819771333, + "learning_rate": 1.4749416650133538e-06, + "loss": 0.4986, + "step": 12606 + }, + { + "epoch": 0.5202195262853841, + "grad_norm": 3.055353090230145, + "learning_rate": 1.474741217208396e-06, + "loss": 0.518, + "step": 12607 + }, + { + "epoch": 0.5202607906247421, + "grad_norm": 2.711724924950688, + "learning_rate": 1.4745407698546236e-06, + "loss": 0.5101, + "step": 12608 + }, + { + "epoch": 0.5203020549641, + "grad_norm": 2.1939267180945716, + "learning_rate": 1.4743403229556164e-06, + "loss": 0.5926, + "step": 12609 + }, + { + "epoch": 0.5203433193034579, + "grad_norm": 5.915260994554094, + "learning_rate": 1.4741398765149556e-06, + "loss": 0.5413, + "step": 12610 + }, + { + "epoch": 0.5203845836428159, + "grad_norm": 2.247027575671885, + "learning_rate": 1.473939430536221e-06, + "loss": 0.5152, + "step": 12611 + }, + { + "epoch": 0.5204258479821738, + "grad_norm": 8.413337552935339, + "learning_rate": 1.4737389850229936e-06, + "loss": 0.5471, + "step": 12612 + }, + { + "epoch": 0.5204671123215318, + "grad_norm": 2.290128570126627, + "learning_rate": 1.4735385399788534e-06, + "loss": 0.4408, + "step": 12613 + }, + { + "epoch": 0.5205083766608897, + "grad_norm": 2.9477099706127103, + "learning_rate": 1.4733380954073813e-06, + "loss": 0.5155, + "step": 12614 + }, + { + "epoch": 0.5205496410002476, + "grad_norm": 2.988876587126066, + "learning_rate": 1.4731376513121573e-06, + "loss": 0.5373, + "step": 12615 + }, + { + "epoch": 0.5205909053396055, + "grad_norm": 2.44785326764364, + "learning_rate": 1.4729372076967624e-06, + "loss": 0.5688, + "step": 12616 + }, + { + "epoch": 0.5206321696789634, + "grad_norm": 2.4699684972352305, + "learning_rate": 1.472736764564776e-06, + "loss": 0.483, + "step": 12617 + }, + { + "epoch": 0.5206734340183213, + "grad_norm": 3.4023171200217135, + "learning_rate": 1.47253632191978e-06, + "loss": 0.5061, + "step": 12618 + }, + { + "epoch": 0.5207146983576793, + "grad_norm": 3.1204259102010083, + "learning_rate": 1.4723358797653532e-06, + "loss": 0.5578, + "step": 12619 + }, + { + "epoch": 0.5207559626970372, + "grad_norm": 2.453151536812164, + "learning_rate": 1.4721354381050781e-06, + "loss": 0.5386, + "step": 12620 + }, + { + "epoch": 0.5207972270363952, + "grad_norm": 3.4667406151352402, + "learning_rate": 1.4719349969425327e-06, + "loss": 0.5398, + "step": 12621 + }, + { + "epoch": 0.5208384913757531, + "grad_norm": 3.1496148894436486, + "learning_rate": 1.4717345562812988e-06, + "loss": 0.5952, + "step": 12622 + }, + { + "epoch": 0.520879755715111, + "grad_norm": 4.196604098380902, + "learning_rate": 1.471534116124956e-06, + "loss": 0.5344, + "step": 12623 + }, + { + "epoch": 0.5209210200544689, + "grad_norm": 2.0646961390465157, + "learning_rate": 1.4713336764770856e-06, + "loss": 0.4614, + "step": 12624 + }, + { + "epoch": 0.5209622843938269, + "grad_norm": 3.473989483564633, + "learning_rate": 1.4711332373412672e-06, + "loss": 0.4952, + "step": 12625 + }, + { + "epoch": 0.5210035487331848, + "grad_norm": 2.8551644015436897, + "learning_rate": 1.4709327987210814e-06, + "loss": 0.5308, + "step": 12626 + }, + { + "epoch": 0.5210448130725427, + "grad_norm": 1.9594816590139583, + "learning_rate": 1.4707323606201084e-06, + "loss": 0.5608, + "step": 12627 + }, + { + "epoch": 0.5210860774119006, + "grad_norm": 1.7387726244494786, + "learning_rate": 1.470531923041929e-06, + "loss": 0.5786, + "step": 12628 + }, + { + "epoch": 
0.5211273417512585, + "grad_norm": 2.1991371677674914, + "learning_rate": 1.4703314859901228e-06, + "loss": 0.4752, + "step": 12629 + }, + { + "epoch": 0.5211686060906164, + "grad_norm": 25.47280914798376, + "learning_rate": 1.4701310494682707e-06, + "loss": 0.5561, + "step": 12630 + }, + { + "epoch": 0.5212098704299745, + "grad_norm": 2.269424611533451, + "learning_rate": 1.4699306134799525e-06, + "loss": 0.4885, + "step": 12631 + }, + { + "epoch": 0.5212511347693324, + "grad_norm": 2.451849134455568, + "learning_rate": 1.4697301780287492e-06, + "loss": 0.5112, + "step": 12632 + }, + { + "epoch": 0.5212923991086903, + "grad_norm": 4.01578590553292, + "learning_rate": 1.469529743118241e-06, + "loss": 0.5032, + "step": 12633 + }, + { + "epoch": 0.5213336634480482, + "grad_norm": 2.805602122497697, + "learning_rate": 1.4693293087520071e-06, + "loss": 0.5342, + "step": 12634 + }, + { + "epoch": 0.5213749277874061, + "grad_norm": 6.378023880273675, + "learning_rate": 1.4691288749336287e-06, + "loss": 0.6033, + "step": 12635 + }, + { + "epoch": 0.521416192126764, + "grad_norm": 2.4244363501580963, + "learning_rate": 1.4689284416666856e-06, + "loss": 0.5195, + "step": 12636 + }, + { + "epoch": 0.521457456466122, + "grad_norm": 3.6289013854719188, + "learning_rate": 1.4687280089547586e-06, + "loss": 0.5216, + "step": 12637 + }, + { + "epoch": 0.5214987208054799, + "grad_norm": 4.567284057507275, + "learning_rate": 1.4685275768014275e-06, + "loss": 0.507, + "step": 12638 + }, + { + "epoch": 0.5215399851448378, + "grad_norm": 2.6214613301693923, + "learning_rate": 1.4683271452102724e-06, + "loss": 0.5152, + "step": 12639 + }, + { + "epoch": 0.5215812494841957, + "grad_norm": 4.158866908209562, + "learning_rate": 1.468126714184874e-06, + "loss": 0.5276, + "step": 12640 + }, + { + "epoch": 0.5216225138235537, + "grad_norm": 3.3470272393111777, + "learning_rate": 1.4679262837288118e-06, + "loss": 0.5447, + "step": 12641 + }, + { + "epoch": 0.5216637781629117, + "grad_norm": 3.0121184263165057, + "learning_rate": 1.4677258538456667e-06, + "loss": 0.5101, + "step": 12642 + }, + { + "epoch": 0.5217050425022696, + "grad_norm": 5.879262103931731, + "learning_rate": 1.4675254245390186e-06, + "loss": 0.5188, + "step": 12643 + }, + { + "epoch": 0.5217463068416275, + "grad_norm": 2.185489607505191, + "learning_rate": 1.4673249958124476e-06, + "loss": 0.5172, + "step": 12644 + }, + { + "epoch": 0.5217875711809854, + "grad_norm": 4.1363051351676345, + "learning_rate": 1.4671245676695338e-06, + "loss": 0.5378, + "step": 12645 + }, + { + "epoch": 0.5218288355203433, + "grad_norm": 3.562232666061212, + "learning_rate": 1.4669241401138582e-06, + "loss": 0.5232, + "step": 12646 + }, + { + "epoch": 0.5218700998597012, + "grad_norm": 2.7251061734387054, + "learning_rate": 1.4667237131489997e-06, + "loss": 0.5263, + "step": 12647 + }, + { + "epoch": 0.5219113641990591, + "grad_norm": 3.778592581834757, + "learning_rate": 1.4665232867785385e-06, + "loss": 0.4939, + "step": 12648 + }, + { + "epoch": 0.5219526285384171, + "grad_norm": 3.6581875943326065, + "learning_rate": 1.4663228610060555e-06, + "loss": 0.5153, + "step": 12649 + }, + { + "epoch": 0.521993892877775, + "grad_norm": 4.5848649524869876, + "learning_rate": 1.4661224358351304e-06, + "loss": 0.5255, + "step": 12650 + }, + { + "epoch": 0.522035157217133, + "grad_norm": 2.6055775996331643, + "learning_rate": 1.4659220112693435e-06, + "loss": 0.5245, + "step": 12651 + }, + { + "epoch": 0.5220764215564909, + "grad_norm": 3.751490085895336, + "learning_rate": 
1.4657215873122744e-06, + "loss": 0.5209, + "step": 12652 + }, + { + "epoch": 0.5221176858958488, + "grad_norm": 6.313351474062236, + "learning_rate": 1.465521163967504e-06, + "loss": 0.5709, + "step": 12653 + }, + { + "epoch": 0.5221589502352068, + "grad_norm": 1.9375279033272788, + "learning_rate": 1.4653207412386112e-06, + "loss": 0.5159, + "step": 12654 + }, + { + "epoch": 0.5222002145745647, + "grad_norm": 2.82549271856869, + "learning_rate": 1.465120319129177e-06, + "loss": 0.5749, + "step": 12655 + }, + { + "epoch": 0.5222414789139226, + "grad_norm": 2.9661408210122078, + "learning_rate": 1.4649198976427811e-06, + "loss": 0.5524, + "step": 12656 + }, + { + "epoch": 0.5222827432532805, + "grad_norm": 14.939292053644161, + "learning_rate": 1.464719476783004e-06, + "loss": 0.5612, + "step": 12657 + }, + { + "epoch": 0.5223240075926384, + "grad_norm": 2.6776322444018494, + "learning_rate": 1.4645190565534247e-06, + "loss": 0.5368, + "step": 12658 + }, + { + "epoch": 0.5223652719319963, + "grad_norm": 2.102754118398342, + "learning_rate": 1.464318636957625e-06, + "loss": 0.5071, + "step": 12659 + }, + { + "epoch": 0.5224065362713542, + "grad_norm": 2.6694926424714898, + "learning_rate": 1.4641182179991827e-06, + "loss": 0.5084, + "step": 12660 + }, + { + "epoch": 0.5224478006107123, + "grad_norm": 2.4256828992159907, + "learning_rate": 1.463917799681679e-06, + "loss": 0.5027, + "step": 12661 + }, + { + "epoch": 0.5224890649500702, + "grad_norm": 2.8506310748537294, + "learning_rate": 1.4637173820086933e-06, + "loss": 0.5227, + "step": 12662 + }, + { + "epoch": 0.5225303292894281, + "grad_norm": 3.593936098713567, + "learning_rate": 1.4635169649838065e-06, + "loss": 0.5187, + "step": 12663 + }, + { + "epoch": 0.522571593628786, + "grad_norm": 2.1967308331975515, + "learning_rate": 1.4633165486105974e-06, + "loss": 0.5164, + "step": 12664 + }, + { + "epoch": 0.5226128579681439, + "grad_norm": 2.0818046926662954, + "learning_rate": 1.4631161328926468e-06, + "loss": 0.5188, + "step": 12665 + }, + { + "epoch": 0.5226541223075019, + "grad_norm": 7.019893145670873, + "learning_rate": 1.4629157178335344e-06, + "loss": 0.5057, + "step": 12666 + }, + { + "epoch": 0.5226953866468598, + "grad_norm": 2.180169336553783, + "learning_rate": 1.46271530343684e-06, + "loss": 0.5241, + "step": 12667 + }, + { + "epoch": 0.5227366509862177, + "grad_norm": 2.7396594495594933, + "learning_rate": 1.4625148897061433e-06, + "loss": 0.5041, + "step": 12668 + }, + { + "epoch": 0.5227779153255756, + "grad_norm": 4.07268122845368, + "learning_rate": 1.4623144766450249e-06, + "loss": 0.539, + "step": 12669 + }, + { + "epoch": 0.5228191796649335, + "grad_norm": 2.82797527503709, + "learning_rate": 1.4621140642570636e-06, + "loss": 0.5243, + "step": 12670 + }, + { + "epoch": 0.5228604440042914, + "grad_norm": 2.780099028355861, + "learning_rate": 1.4619136525458406e-06, + "loss": 0.5031, + "step": 12671 + }, + { + "epoch": 0.5229017083436495, + "grad_norm": 2.514969943536838, + "learning_rate": 1.461713241514935e-06, + "loss": 0.5443, + "step": 12672 + }, + { + "epoch": 0.5229429726830074, + "grad_norm": 2.4654083813747785, + "learning_rate": 1.4615128311679264e-06, + "loss": 0.5466, + "step": 12673 + }, + { + "epoch": 0.5229842370223653, + "grad_norm": 3.4303623085299213, + "learning_rate": 1.461312421508395e-06, + "loss": 0.5223, + "step": 12674 + }, + { + "epoch": 0.5230255013617232, + "grad_norm": 2.072090324466339, + "learning_rate": 1.4611120125399202e-06, + "loss": 0.5412, + "step": 12675 + }, + { + "epoch": 
0.5230667657010811, + "grad_norm": 3.5637417785622802, + "learning_rate": 1.4609116042660825e-06, + "loss": 0.5483, + "step": 12676 + }, + { + "epoch": 0.523108030040439, + "grad_norm": 5.826519682695953, + "learning_rate": 1.4607111966904615e-06, + "loss": 0.4693, + "step": 12677 + }, + { + "epoch": 0.523149294379797, + "grad_norm": 3.402339867985933, + "learning_rate": 1.4605107898166364e-06, + "loss": 0.5504, + "step": 12678 + }, + { + "epoch": 0.5231905587191549, + "grad_norm": 4.829335223539219, + "learning_rate": 1.4603103836481876e-06, + "loss": 0.5264, + "step": 12679 + }, + { + "epoch": 0.5232318230585128, + "grad_norm": 10.478675003909183, + "learning_rate": 1.4601099781886944e-06, + "loss": 0.5161, + "step": 12680 + }, + { + "epoch": 0.5232730873978707, + "grad_norm": 2.917421756739302, + "learning_rate": 1.4599095734417369e-06, + "loss": 0.5579, + "step": 12681 + }, + { + "epoch": 0.5233143517372287, + "grad_norm": 3.565687275544634, + "learning_rate": 1.4597091694108947e-06, + "loss": 0.4792, + "step": 12682 + }, + { + "epoch": 0.5233556160765866, + "grad_norm": 13.637689744564607, + "learning_rate": 1.4595087660997477e-06, + "loss": 0.495, + "step": 12683 + }, + { + "epoch": 0.5233968804159446, + "grad_norm": 2.2479075986343813, + "learning_rate": 1.459308363511875e-06, + "loss": 0.5329, + "step": 12684 + }, + { + "epoch": 0.5234381447553025, + "grad_norm": 3.6062476152219767, + "learning_rate": 1.4591079616508577e-06, + "loss": 0.5577, + "step": 12685 + }, + { + "epoch": 0.5234794090946604, + "grad_norm": 2.1528553148516103, + "learning_rate": 1.458907560520274e-06, + "loss": 0.561, + "step": 12686 + }, + { + "epoch": 0.5235206734340183, + "grad_norm": 3.536842790275692, + "learning_rate": 1.4587071601237035e-06, + "loss": 0.5231, + "step": 12687 + }, + { + "epoch": 0.5235619377733762, + "grad_norm": 2.2311001110801727, + "learning_rate": 1.458506760464727e-06, + "loss": 0.4947, + "step": 12688 + }, + { + "epoch": 0.5236032021127341, + "grad_norm": 2.684959733885512, + "learning_rate": 1.4583063615469232e-06, + "loss": 0.5286, + "step": 12689 + }, + { + "epoch": 0.5236444664520921, + "grad_norm": 2.535055946415464, + "learning_rate": 1.4581059633738723e-06, + "loss": 0.524, + "step": 12690 + }, + { + "epoch": 0.52368573079145, + "grad_norm": 3.216275708991336, + "learning_rate": 1.4579055659491536e-06, + "loss": 0.5254, + "step": 12691 + }, + { + "epoch": 0.523726995130808, + "grad_norm": 4.404498896984451, + "learning_rate": 1.4577051692763469e-06, + "loss": 0.4793, + "step": 12692 + }, + { + "epoch": 0.5237682594701659, + "grad_norm": 2.50771463639342, + "learning_rate": 1.4575047733590315e-06, + "loss": 0.5198, + "step": 12693 + }, + { + "epoch": 0.5238095238095238, + "grad_norm": 2.856377027983991, + "learning_rate": 1.4573043782007873e-06, + "loss": 0.5507, + "step": 12694 + }, + { + "epoch": 0.5238507881488818, + "grad_norm": 2.9168747085517563, + "learning_rate": 1.4571039838051935e-06, + "loss": 0.5462, + "step": 12695 + }, + { + "epoch": 0.5238920524882397, + "grad_norm": 2.980112871997153, + "learning_rate": 1.4569035901758301e-06, + "loss": 0.5123, + "step": 12696 + }, + { + "epoch": 0.5239333168275976, + "grad_norm": 2.873285281215494, + "learning_rate": 1.4567031973162764e-06, + "loss": 0.5403, + "step": 12697 + }, + { + "epoch": 0.5239745811669555, + "grad_norm": 9.28953701387598, + "learning_rate": 1.4565028052301124e-06, + "loss": 0.5639, + "step": 12698 + }, + { + "epoch": 0.5240158455063134, + "grad_norm": 3.6525294880310866, + "learning_rate": 
1.4563024139209166e-06, + "loss": 0.5275, + "step": 12699 + }, + { + "epoch": 0.5240571098456713, + "grad_norm": 2.3937106019611196, + "learning_rate": 1.4561020233922692e-06, + "loss": 0.5065, + "step": 12700 + }, + { + "epoch": 0.5240983741850292, + "grad_norm": 3.3187478351927897, + "learning_rate": 1.455901633647749e-06, + "loss": 0.573, + "step": 12701 + }, + { + "epoch": 0.5241396385243873, + "grad_norm": 2.5701641564365336, + "learning_rate": 1.4557012446909362e-06, + "loss": 0.526, + "step": 12702 + }, + { + "epoch": 0.5241809028637452, + "grad_norm": 3.046209659674668, + "learning_rate": 1.4555008565254099e-06, + "loss": 0.5405, + "step": 12703 + }, + { + "epoch": 0.5242221672031031, + "grad_norm": 2.506080392118043, + "learning_rate": 1.4553004691547497e-06, + "loss": 0.5939, + "step": 12704 + }, + { + "epoch": 0.524263431542461, + "grad_norm": 7.383104121357456, + "learning_rate": 1.4551000825825348e-06, + "loss": 0.5358, + "step": 12705 + }, + { + "epoch": 0.5243046958818189, + "grad_norm": 3.5045630847930713, + "learning_rate": 1.454899696812345e-06, + "loss": 0.5533, + "step": 12706 + }, + { + "epoch": 0.5243459602211769, + "grad_norm": 3.101081796966408, + "learning_rate": 1.4546993118477592e-06, + "loss": 0.488, + "step": 12707 + }, + { + "epoch": 0.5243872245605348, + "grad_norm": 6.9285251290511205, + "learning_rate": 1.4544989276923571e-06, + "loss": 0.5456, + "step": 12708 + }, + { + "epoch": 0.5244284888998927, + "grad_norm": 2.40700272088534, + "learning_rate": 1.4542985443497179e-06, + "loss": 0.4718, + "step": 12709 + }, + { + "epoch": 0.5244697532392506, + "grad_norm": 3.1471327172285832, + "learning_rate": 1.4540981618234212e-06, + "loss": 0.5405, + "step": 12710 + }, + { + "epoch": 0.5245110175786085, + "grad_norm": 2.214346409333535, + "learning_rate": 1.453897780117046e-06, + "loss": 0.5662, + "step": 12711 + }, + { + "epoch": 0.5245522819179665, + "grad_norm": 15.58614810391802, + "learning_rate": 1.4536973992341724e-06, + "loss": 0.5038, + "step": 12712 + }, + { + "epoch": 0.5245935462573245, + "grad_norm": 1.6934569673673723, + "learning_rate": 1.4534970191783786e-06, + "loss": 0.4999, + "step": 12713 + }, + { + "epoch": 0.5246348105966824, + "grad_norm": 3.0011669631666775, + "learning_rate": 1.4532966399532442e-06, + "loss": 0.5566, + "step": 12714 + }, + { + "epoch": 0.5246760749360403, + "grad_norm": 2.573031693310015, + "learning_rate": 1.453096261562349e-06, + "loss": 0.4845, + "step": 12715 + }, + { + "epoch": 0.5247173392753982, + "grad_norm": 1.7013078843678737, + "learning_rate": 1.4528958840092718e-06, + "loss": 0.4627, + "step": 12716 + }, + { + "epoch": 0.5247586036147561, + "grad_norm": 2.5918092879711043, + "learning_rate": 1.4526955072975917e-06, + "loss": 0.5755, + "step": 12717 + }, + { + "epoch": 0.524799867954114, + "grad_norm": 2.0404051834079824, + "learning_rate": 1.4524951314308885e-06, + "loss": 0.4947, + "step": 12718 + }, + { + "epoch": 0.524841132293472, + "grad_norm": 3.2164557633268025, + "learning_rate": 1.4522947564127406e-06, + "loss": 0.503, + "step": 12719 + }, + { + "epoch": 0.5248823966328299, + "grad_norm": 2.0261566859644495, + "learning_rate": 1.452094382246728e-06, + "loss": 0.5043, + "step": 12720 + }, + { + "epoch": 0.5249236609721878, + "grad_norm": 3.196957269161374, + "learning_rate": 1.4518940089364293e-06, + "loss": 0.5434, + "step": 12721 + }, + { + "epoch": 0.5249649253115458, + "grad_norm": 4.012091925960947, + "learning_rate": 1.4516936364854244e-06, + "loss": 0.5071, + "step": 12722 + }, + { + "epoch": 
0.5250061896509037, + "grad_norm": 2.746587269058626, + "learning_rate": 1.4514932648972918e-06, + "loss": 0.5391, + "step": 12723 + }, + { + "epoch": 0.5250474539902616, + "grad_norm": 3.142837330422078, + "learning_rate": 1.451292894175611e-06, + "loss": 0.5097, + "step": 12724 + }, + { + "epoch": 0.5250887183296196, + "grad_norm": 2.202842875640575, + "learning_rate": 1.4510925243239613e-06, + "loss": 0.5836, + "step": 12725 + }, + { + "epoch": 0.5251299826689775, + "grad_norm": 2.8189592429234036, + "learning_rate": 1.4508921553459206e-06, + "loss": 0.5181, + "step": 12726 + }, + { + "epoch": 0.5251712470083354, + "grad_norm": 4.467692405206063, + "learning_rate": 1.4506917872450694e-06, + "loss": 0.5083, + "step": 12727 + }, + { + "epoch": 0.5252125113476933, + "grad_norm": 2.5083356141189386, + "learning_rate": 1.450491420024986e-06, + "loss": 0.5444, + "step": 12728 + }, + { + "epoch": 0.5252537756870512, + "grad_norm": 2.7157879465881143, + "learning_rate": 1.4502910536892497e-06, + "loss": 0.4993, + "step": 12729 + }, + { + "epoch": 0.5252950400264091, + "grad_norm": 2.8648581239974984, + "learning_rate": 1.4500906882414393e-06, + "loss": 0.5222, + "step": 12730 + }, + { + "epoch": 0.5253363043657671, + "grad_norm": 4.3967241842746185, + "learning_rate": 1.4498903236851345e-06, + "loss": 0.5413, + "step": 12731 + }, + { + "epoch": 0.5253775687051251, + "grad_norm": 2.111510722427422, + "learning_rate": 1.4496899600239135e-06, + "loss": 0.4735, + "step": 12732 + }, + { + "epoch": 0.525418833044483, + "grad_norm": 1.9895595658100793, + "learning_rate": 1.449489597261356e-06, + "loss": 0.5244, + "step": 12733 + }, + { + "epoch": 0.5254600973838409, + "grad_norm": 2.992312663944791, + "learning_rate": 1.4492892354010405e-06, + "loss": 0.5006, + "step": 12734 + }, + { + "epoch": 0.5255013617231988, + "grad_norm": 1.7008837984177476, + "learning_rate": 1.4490888744465461e-06, + "loss": 0.5127, + "step": 12735 + }, + { + "epoch": 0.5255426260625568, + "grad_norm": 2.8143044424087504, + "learning_rate": 1.4488885144014515e-06, + "loss": 0.4942, + "step": 12736 + }, + { + "epoch": 0.5255838904019147, + "grad_norm": 2.4310491915077703, + "learning_rate": 1.4486881552693365e-06, + "loss": 0.5504, + "step": 12737 + }, + { + "epoch": 0.5256251547412726, + "grad_norm": 2.8963197738225754, + "learning_rate": 1.4484877970537795e-06, + "loss": 0.4861, + "step": 12738 + }, + { + "epoch": 0.5256664190806305, + "grad_norm": 2.816861040584028, + "learning_rate": 1.4482874397583592e-06, + "loss": 0.516, + "step": 12739 + }, + { + "epoch": 0.5257076834199884, + "grad_norm": 2.9143765693376373, + "learning_rate": 1.448087083386654e-06, + "loss": 0.4977, + "step": 12740 + }, + { + "epoch": 0.5257489477593463, + "grad_norm": 3.1130724575001736, + "learning_rate": 1.4478867279422439e-06, + "loss": 0.5073, + "step": 12741 + }, + { + "epoch": 0.5257902120987042, + "grad_norm": 4.067225413847596, + "learning_rate": 1.4476863734287066e-06, + "loss": 0.5206, + "step": 12742 + }, + { + "epoch": 0.5258314764380623, + "grad_norm": 3.0043217443824597, + "learning_rate": 1.4474860198496222e-06, + "loss": 0.4442, + "step": 12743 + }, + { + "epoch": 0.5258727407774202, + "grad_norm": 2.6411130359610993, + "learning_rate": 1.4472856672085684e-06, + "loss": 0.5549, + "step": 12744 + }, + { + "epoch": 0.5259140051167781, + "grad_norm": 4.457159208316886, + "learning_rate": 1.4470853155091247e-06, + "loss": 0.5129, + "step": 12745 + }, + { + "epoch": 0.525955269456136, + "grad_norm": 3.40239039802154, + "learning_rate": 
1.4468849647548694e-06, + "loss": 0.5167, + "step": 12746 + }, + { + "epoch": 0.5259965337954939, + "grad_norm": 2.2026163332762914, + "learning_rate": 1.4466846149493816e-06, + "loss": 0.5359, + "step": 12747 + }, + { + "epoch": 0.5260377981348519, + "grad_norm": 2.0654336453317876, + "learning_rate": 1.4464842660962398e-06, + "loss": 0.5034, + "step": 12748 + }, + { + "epoch": 0.5260790624742098, + "grad_norm": 2.381641315904177, + "learning_rate": 1.4462839181990232e-06, + "loss": 0.5253, + "step": 12749 + }, + { + "epoch": 0.5261203268135677, + "grad_norm": 3.074379921686767, + "learning_rate": 1.4460835712613101e-06, + "loss": 0.5336, + "step": 12750 + }, + { + "epoch": 0.5261615911529256, + "grad_norm": 2.9112915478940056, + "learning_rate": 1.4458832252866796e-06, + "loss": 0.612, + "step": 12751 + }, + { + "epoch": 0.5262028554922835, + "grad_norm": 3.374388203457494, + "learning_rate": 1.44568288027871e-06, + "loss": 0.5346, + "step": 12752 + }, + { + "epoch": 0.5262441198316415, + "grad_norm": 5.1150741146638685, + "learning_rate": 1.4454825362409795e-06, + "loss": 0.5143, + "step": 12753 + }, + { + "epoch": 0.5262853841709995, + "grad_norm": 4.035659863840472, + "learning_rate": 1.4452821931770676e-06, + "loss": 0.5531, + "step": 12754 + }, + { + "epoch": 0.5263266485103574, + "grad_norm": 4.99964907212144, + "learning_rate": 1.445081851090553e-06, + "loss": 0.5607, + "step": 12755 + }, + { + "epoch": 0.5263679128497153, + "grad_norm": 1.8507178447480266, + "learning_rate": 1.4448815099850132e-06, + "loss": 0.4706, + "step": 12756 + }, + { + "epoch": 0.5264091771890732, + "grad_norm": 2.7670440968464867, + "learning_rate": 1.444681169864028e-06, + "loss": 0.4746, + "step": 12757 + }, + { + "epoch": 0.5264504415284311, + "grad_norm": 3.205403200884985, + "learning_rate": 1.4444808307311752e-06, + "loss": 0.5085, + "step": 12758 + }, + { + "epoch": 0.526491705867789, + "grad_norm": 2.7913404939659308, + "learning_rate": 1.4442804925900339e-06, + "loss": 0.5371, + "step": 12759 + }, + { + "epoch": 0.526532970207147, + "grad_norm": 4.58247466789092, + "learning_rate": 1.444080155444182e-06, + "loss": 0.5178, + "step": 12760 + }, + { + "epoch": 0.5265742345465049, + "grad_norm": 3.390118253773251, + "learning_rate": 1.4438798192971989e-06, + "loss": 0.5283, + "step": 12761 + }, + { + "epoch": 0.5266154988858628, + "grad_norm": 3.196222824834011, + "learning_rate": 1.4436794841526622e-06, + "loss": 0.5177, + "step": 12762 + }, + { + "epoch": 0.5266567632252208, + "grad_norm": 3.530742553354445, + "learning_rate": 1.4434791500141512e-06, + "loss": 0.5112, + "step": 12763 + }, + { + "epoch": 0.5266980275645787, + "grad_norm": 2.620961335344793, + "learning_rate": 1.4432788168852441e-06, + "loss": 0.5119, + "step": 12764 + }, + { + "epoch": 0.5267392919039366, + "grad_norm": 3.5898829422224408, + "learning_rate": 1.4430784847695187e-06, + "loss": 0.4708, + "step": 12765 + }, + { + "epoch": 0.5267805562432946, + "grad_norm": 2.88873187603045, + "learning_rate": 1.4428781536705543e-06, + "loss": 0.4939, + "step": 12766 + }, + { + "epoch": 0.5268218205826525, + "grad_norm": 12.736277029664157, + "learning_rate": 1.4426778235919285e-06, + "loss": 0.5468, + "step": 12767 + }, + { + "epoch": 0.5268630849220104, + "grad_norm": 14.332271780297454, + "learning_rate": 1.4424774945372207e-06, + "loss": 0.5156, + "step": 12768 + }, + { + "epoch": 0.5269043492613683, + "grad_norm": 2.4833580689249852, + "learning_rate": 1.4422771665100084e-06, + "loss": 0.4555, + "step": 12769 + }, + { + "epoch": 
0.5269456136007262, + "grad_norm": 4.124790403943441, + "learning_rate": 1.4420768395138706e-06, + "loss": 0.5068, + "step": 12770 + }, + { + "epoch": 0.5269868779400841, + "grad_norm": 2.4290986034126467, + "learning_rate": 1.441876513552385e-06, + "loss": 0.5383, + "step": 12771 + }, + { + "epoch": 0.5270281422794421, + "grad_norm": 5.295230071528774, + "learning_rate": 1.4416761886291307e-06, + "loss": 0.507, + "step": 12772 + }, + { + "epoch": 0.5270694066188001, + "grad_norm": 2.211448325600547, + "learning_rate": 1.441475864747685e-06, + "loss": 0.4589, + "step": 12773 + }, + { + "epoch": 0.527110670958158, + "grad_norm": 2.7758038674345933, + "learning_rate": 1.4412755419116274e-06, + "loss": 0.5959, + "step": 12774 + }, + { + "epoch": 0.5271519352975159, + "grad_norm": 3.382963012547261, + "learning_rate": 1.4410752201245352e-06, + "loss": 0.5387, + "step": 12775 + }, + { + "epoch": 0.5271931996368738, + "grad_norm": 3.029498150335586, + "learning_rate": 1.440874899389987e-06, + "loss": 0.5282, + "step": 12776 + }, + { + "epoch": 0.5272344639762317, + "grad_norm": 3.6743738865928597, + "learning_rate": 1.4406745797115615e-06, + "loss": 0.4917, + "step": 12777 + }, + { + "epoch": 0.5272757283155897, + "grad_norm": 3.090989373858858, + "learning_rate": 1.4404742610928362e-06, + "loss": 0.5012, + "step": 12778 + }, + { + "epoch": 0.5273169926549476, + "grad_norm": 2.9995891162098114, + "learning_rate": 1.4402739435373889e-06, + "loss": 0.4733, + "step": 12779 + }, + { + "epoch": 0.5273582569943055, + "grad_norm": 5.2988065015455925, + "learning_rate": 1.440073627048799e-06, + "loss": 0.484, + "step": 12780 + }, + { + "epoch": 0.5273995213336634, + "grad_norm": 2.573650721207413, + "learning_rate": 1.4398733116306436e-06, + "loss": 0.496, + "step": 12781 + }, + { + "epoch": 0.5274407856730213, + "grad_norm": 2.7013447754854973, + "learning_rate": 1.4396729972865014e-06, + "loss": 0.5155, + "step": 12782 + }, + { + "epoch": 0.5274820500123794, + "grad_norm": 5.730996907651918, + "learning_rate": 1.4394726840199503e-06, + "loss": 0.5727, + "step": 12783 + }, + { + "epoch": 0.5275233143517373, + "grad_norm": 3.2066166999973165, + "learning_rate": 1.4392723718345688e-06, + "loss": 0.5076, + "step": 12784 + }, + { + "epoch": 0.5275645786910952, + "grad_norm": 2.524134855340643, + "learning_rate": 1.4390720607339341e-06, + "loss": 0.5127, + "step": 12785 + }, + { + "epoch": 0.5276058430304531, + "grad_norm": 2.4022574674175696, + "learning_rate": 1.4388717507216254e-06, + "loss": 0.5341, + "step": 12786 + }, + { + "epoch": 0.527647107369811, + "grad_norm": 4.934465322182969, + "learning_rate": 1.4386714418012196e-06, + "loss": 0.479, + "step": 12787 + }, + { + "epoch": 0.5276883717091689, + "grad_norm": 3.3557552265806336, + "learning_rate": 1.4384711339762958e-06, + "loss": 0.4892, + "step": 12788 + }, + { + "epoch": 0.5277296360485269, + "grad_norm": 3.4070935020761977, + "learning_rate": 1.4382708272504312e-06, + "loss": 0.5097, + "step": 12789 + }, + { + "epoch": 0.5277709003878848, + "grad_norm": 3.3481906497850673, + "learning_rate": 1.4380705216272043e-06, + "loss": 0.5302, + "step": 12790 + }, + { + "epoch": 0.5278121647272427, + "grad_norm": 5.046371710785326, + "learning_rate": 1.437870217110193e-06, + "loss": 0.5774, + "step": 12791 + }, + { + "epoch": 0.5278534290666006, + "grad_norm": 5.272712708942345, + "learning_rate": 1.4376699137029743e-06, + "loss": 0.5464, + "step": 12792 + }, + { + "epoch": 0.5278946934059586, + "grad_norm": 2.015739233654831, + "learning_rate": 
1.4374696114091275e-06, + "loss": 0.5361, + "step": 12793 + }, + { + "epoch": 0.5279359577453165, + "grad_norm": 4.116842605372763, + "learning_rate": 1.4372693102322296e-06, + "loss": 0.4623, + "step": 12794 + }, + { + "epoch": 0.5279772220846745, + "grad_norm": 6.097577361805211, + "learning_rate": 1.4370690101758586e-06, + "loss": 0.534, + "step": 12795 + }, + { + "epoch": 0.5280184864240324, + "grad_norm": 4.870310798133202, + "learning_rate": 1.4368687112435928e-06, + "loss": 0.5108, + "step": 12796 + }, + { + "epoch": 0.5280597507633903, + "grad_norm": 2.9829780598169515, + "learning_rate": 1.4366684134390094e-06, + "loss": 0.5669, + "step": 12797 + }, + { + "epoch": 0.5281010151027482, + "grad_norm": 1.8978361543152247, + "learning_rate": 1.4364681167656868e-06, + "loss": 0.4654, + "step": 12798 + }, + { + "epoch": 0.5281422794421061, + "grad_norm": 4.9292385841366375, + "learning_rate": 1.4362678212272024e-06, + "loss": 0.5476, + "step": 12799 + }, + { + "epoch": 0.528183543781464, + "grad_norm": 2.542005559413269, + "learning_rate": 1.4360675268271343e-06, + "loss": 0.5078, + "step": 12800 + }, + { + "epoch": 0.528224808120822, + "grad_norm": 1.8996563640565958, + "learning_rate": 1.43586723356906e-06, + "loss": 0.4971, + "step": 12801 + }, + { + "epoch": 0.5282660724601799, + "grad_norm": 2.3530529900747403, + "learning_rate": 1.4356669414565577e-06, + "loss": 0.4587, + "step": 12802 + }, + { + "epoch": 0.5283073367995378, + "grad_norm": 8.198978621733687, + "learning_rate": 1.435466650493204e-06, + "loss": 0.5267, + "step": 12803 + }, + { + "epoch": 0.5283486011388958, + "grad_norm": 2.097015373459558, + "learning_rate": 1.4352663606825783e-06, + "loss": 0.5383, + "step": 12804 + }, + { + "epoch": 0.5283898654782537, + "grad_norm": 5.88708367570696, + "learning_rate": 1.4350660720282572e-06, + "loss": 0.4655, + "step": 12805 + }, + { + "epoch": 0.5284311298176116, + "grad_norm": 7.700716540249802, + "learning_rate": 1.434865784533818e-06, + "loss": 0.5495, + "step": 12806 + }, + { + "epoch": 0.5284723941569696, + "grad_norm": 2.081118040354205, + "learning_rate": 1.434665498202839e-06, + "loss": 0.5045, + "step": 12807 + }, + { + "epoch": 0.5285136584963275, + "grad_norm": 5.016295226601208, + "learning_rate": 1.4344652130388976e-06, + "loss": 0.5445, + "step": 12808 + }, + { + "epoch": 0.5285549228356854, + "grad_norm": 4.07070193655789, + "learning_rate": 1.4342649290455716e-06, + "loss": 0.5048, + "step": 12809 + }, + { + "epoch": 0.5285961871750433, + "grad_norm": 4.050015541125231, + "learning_rate": 1.434064646226438e-06, + "loss": 0.4791, + "step": 12810 + }, + { + "epoch": 0.5286374515144012, + "grad_norm": 4.485289844124399, + "learning_rate": 1.4338643645850751e-06, + "loss": 0.5396, + "step": 12811 + }, + { + "epoch": 0.5286787158537591, + "grad_norm": 4.7502349755626225, + "learning_rate": 1.4336640841250598e-06, + "loss": 0.4953, + "step": 12812 + }, + { + "epoch": 0.5287199801931171, + "grad_norm": 2.574714749006157, + "learning_rate": 1.4334638048499706e-06, + "loss": 0.4999, + "step": 12813 + }, + { + "epoch": 0.5287612445324751, + "grad_norm": 2.7914124368294297, + "learning_rate": 1.4332635267633835e-06, + "loss": 0.5241, + "step": 12814 + }, + { + "epoch": 0.528802508871833, + "grad_norm": 2.7149308010267137, + "learning_rate": 1.4330632498688774e-06, + "loss": 0.5851, + "step": 12815 + }, + { + "epoch": 0.5288437732111909, + "grad_norm": 2.426891291770928, + "learning_rate": 1.4328629741700288e-06, + "loss": 0.5235, + "step": 12816 + }, + { + "epoch": 
0.5288850375505488, + "grad_norm": 2.151878153795158, + "learning_rate": 1.432662699670416e-06, + "loss": 0.4954, + "step": 12817 + }, + { + "epoch": 0.5289263018899067, + "grad_norm": 3.118965831819781, + "learning_rate": 1.4324624263736151e-06, + "loss": 0.5448, + "step": 12818 + }, + { + "epoch": 0.5289675662292647, + "grad_norm": 4.044987021227344, + "learning_rate": 1.4322621542832046e-06, + "loss": 0.5247, + "step": 12819 + }, + { + "epoch": 0.5290088305686226, + "grad_norm": 7.036880263912035, + "learning_rate": 1.432061883402761e-06, + "loss": 0.5688, + "step": 12820 + }, + { + "epoch": 0.5290500949079805, + "grad_norm": 3.8765211345203388, + "learning_rate": 1.4318616137358626e-06, + "loss": 0.5271, + "step": 12821 + }, + { + "epoch": 0.5290913592473384, + "grad_norm": 2.345388314988821, + "learning_rate": 1.4316613452860859e-06, + "loss": 0.5152, + "step": 12822 + }, + { + "epoch": 0.5291326235866963, + "grad_norm": 4.300965024302015, + "learning_rate": 1.4314610780570087e-06, + "loss": 0.6151, + "step": 12823 + }, + { + "epoch": 0.5291738879260544, + "grad_norm": 2.6802020072520163, + "learning_rate": 1.4312608120522078e-06, + "loss": 0.5131, + "step": 12824 + }, + { + "epoch": 0.5292151522654123, + "grad_norm": 4.885133160685022, + "learning_rate": 1.4310605472752612e-06, + "loss": 0.474, + "step": 12825 + }, + { + "epoch": 0.5292564166047702, + "grad_norm": 7.284194468937402, + "learning_rate": 1.4308602837297455e-06, + "loss": 0.5594, + "step": 12826 + }, + { + "epoch": 0.5292976809441281, + "grad_norm": 10.061180994333062, + "learning_rate": 1.4306600214192382e-06, + "loss": 0.5387, + "step": 12827 + }, + { + "epoch": 0.529338945283486, + "grad_norm": 16.770085316699618, + "learning_rate": 1.4304597603473162e-06, + "loss": 0.4897, + "step": 12828 + }, + { + "epoch": 0.5293802096228439, + "grad_norm": 2.2306703354200064, + "learning_rate": 1.430259500517557e-06, + "loss": 0.5534, + "step": 12829 + }, + { + "epoch": 0.5294214739622018, + "grad_norm": 1.951334840646782, + "learning_rate": 1.4300592419335378e-06, + "loss": 0.5171, + "step": 12830 + }, + { + "epoch": 0.5294627383015598, + "grad_norm": 5.364782575730311, + "learning_rate": 1.429858984598835e-06, + "loss": 0.5996, + "step": 12831 + }, + { + "epoch": 0.5295040026409177, + "grad_norm": 11.665028535819154, + "learning_rate": 1.4296587285170266e-06, + "loss": 0.5543, + "step": 12832 + }, + { + "epoch": 0.5295452669802756, + "grad_norm": 4.9768593024362655, + "learning_rate": 1.4294584736916893e-06, + "loss": 0.5279, + "step": 12833 + }, + { + "epoch": 0.5295865313196336, + "grad_norm": 3.509936388645637, + "learning_rate": 1.4292582201263996e-06, + "loss": 0.615, + "step": 12834 + }, + { + "epoch": 0.5296277956589915, + "grad_norm": 3.1765874585706992, + "learning_rate": 1.4290579678247355e-06, + "loss": 0.5674, + "step": 12835 + }, + { + "epoch": 0.5296690599983495, + "grad_norm": 5.282099113401111, + "learning_rate": 1.4288577167902734e-06, + "loss": 0.5673, + "step": 12836 + }, + { + "epoch": 0.5297103243377074, + "grad_norm": 2.625295161097604, + "learning_rate": 1.4286574670265906e-06, + "loss": 0.551, + "step": 12837 + }, + { + "epoch": 0.5297515886770653, + "grad_norm": 1.8628798254029193, + "learning_rate": 1.4284572185372635e-06, + "loss": 0.4993, + "step": 12838 + }, + { + "epoch": 0.5297928530164232, + "grad_norm": 2.956029214395986, + "learning_rate": 1.4282569713258701e-06, + "loss": 0.502, + "step": 12839 + }, + { + "epoch": 0.5298341173557811, + "grad_norm": 4.619355429979543, + "learning_rate": 
1.4280567253959862e-06, + "loss": 0.5179, + "step": 12840 + }, + { + "epoch": 0.529875381695139, + "grad_norm": 3.695836285196334, + "learning_rate": 1.4278564807511893e-06, + "loss": 0.548, + "step": 12841 + }, + { + "epoch": 0.529916646034497, + "grad_norm": 3.632509771686023, + "learning_rate": 1.4276562373950558e-06, + "loss": 0.475, + "step": 12842 + }, + { + "epoch": 0.5299579103738549, + "grad_norm": 2.405377003629645, + "learning_rate": 1.4274559953311637e-06, + "loss": 0.5202, + "step": 12843 + }, + { + "epoch": 0.5299991747132129, + "grad_norm": 2.0008093729956924, + "learning_rate": 1.4272557545630885e-06, + "loss": 0.5244, + "step": 12844 + }, + { + "epoch": 0.5300404390525708, + "grad_norm": 3.1516692414360685, + "learning_rate": 1.4270555150944075e-06, + "loss": 0.5739, + "step": 12845 + }, + { + "epoch": 0.5300817033919287, + "grad_norm": 5.717906395214731, + "learning_rate": 1.4268552769286975e-06, + "loss": 0.5152, + "step": 12846 + }, + { + "epoch": 0.5301229677312866, + "grad_norm": 2.5219674203927256, + "learning_rate": 1.426655040069535e-06, + "loss": 0.515, + "step": 12847 + }, + { + "epoch": 0.5301642320706446, + "grad_norm": 2.4772318077590785, + "learning_rate": 1.4264548045204973e-06, + "loss": 0.5308, + "step": 12848 + }, + { + "epoch": 0.5302054964100025, + "grad_norm": 2.1129962401225977, + "learning_rate": 1.4262545702851606e-06, + "loss": 0.5291, + "step": 12849 + }, + { + "epoch": 0.5302467607493604, + "grad_norm": 2.128297749667427, + "learning_rate": 1.4260543373671018e-06, + "loss": 0.4753, + "step": 12850 + }, + { + "epoch": 0.5302880250887183, + "grad_norm": 7.900900008506916, + "learning_rate": 1.4258541057698973e-06, + "loss": 0.4628, + "step": 12851 + }, + { + "epoch": 0.5303292894280762, + "grad_norm": 6.648065167569253, + "learning_rate": 1.4256538754971243e-06, + "loss": 0.4839, + "step": 12852 + }, + { + "epoch": 0.5303705537674341, + "grad_norm": 3.8154021807173955, + "learning_rate": 1.4254536465523588e-06, + "loss": 0.5547, + "step": 12853 + }, + { + "epoch": 0.5304118181067922, + "grad_norm": 1.6428096242686574, + "learning_rate": 1.4252534189391778e-06, + "loss": 0.5251, + "step": 12854 + }, + { + "epoch": 0.5304530824461501, + "grad_norm": 4.821592796176187, + "learning_rate": 1.4250531926611579e-06, + "loss": 0.5051, + "step": 12855 + }, + { + "epoch": 0.530494346785508, + "grad_norm": 3.5083628967525358, + "learning_rate": 1.4248529677218759e-06, + "loss": 0.524, + "step": 12856 + }, + { + "epoch": 0.5305356111248659, + "grad_norm": 10.08703561473614, + "learning_rate": 1.424652744124907e-06, + "loss": 0.5402, + "step": 12857 + }, + { + "epoch": 0.5305768754642238, + "grad_norm": 2.0099850343866836, + "learning_rate": 1.424452521873829e-06, + "loss": 0.5415, + "step": 12858 + }, + { + "epoch": 0.5306181398035817, + "grad_norm": 2.7204169700456498, + "learning_rate": 1.4242523009722177e-06, + "loss": 0.5305, + "step": 12859 + }, + { + "epoch": 0.5306594041429397, + "grad_norm": 3.0912995867034323, + "learning_rate": 1.42405208142365e-06, + "loss": 0.5084, + "step": 12860 + }, + { + "epoch": 0.5307006684822976, + "grad_norm": 4.13986503497025, + "learning_rate": 1.4238518632317019e-06, + "loss": 0.4924, + "step": 12861 + }, + { + "epoch": 0.5307419328216555, + "grad_norm": 2.7889434655711507, + "learning_rate": 1.42365164639995e-06, + "loss": 0.5165, + "step": 12862 + }, + { + "epoch": 0.5307831971610134, + "grad_norm": 2.9739608327069975, + "learning_rate": 1.4234514309319709e-06, + "loss": 0.5543, + "step": 12863 + }, + { + "epoch": 
0.5308244615003713, + "grad_norm": 3.0603551527071167, + "learning_rate": 1.4232512168313407e-06, + "loss": 0.4966, + "step": 12864 + }, + { + "epoch": 0.5308657258397294, + "grad_norm": 4.106691645398707, + "learning_rate": 1.4230510041016356e-06, + "loss": 0.5917, + "step": 12865 + }, + { + "epoch": 0.5309069901790873, + "grad_norm": 3.0939252134873936, + "learning_rate": 1.4228507927464323e-06, + "loss": 0.5399, + "step": 12866 + }, + { + "epoch": 0.5309482545184452, + "grad_norm": 1.9646867292755192, + "learning_rate": 1.4226505827693065e-06, + "loss": 0.4832, + "step": 12867 + }, + { + "epoch": 0.5309895188578031, + "grad_norm": 5.795258437303669, + "learning_rate": 1.4224503741738352e-06, + "loss": 0.5206, + "step": 12868 + }, + { + "epoch": 0.531030783197161, + "grad_norm": 3.216031386128158, + "learning_rate": 1.4222501669635946e-06, + "loss": 0.5331, + "step": 12869 + }, + { + "epoch": 0.5310720475365189, + "grad_norm": 8.486239610883509, + "learning_rate": 1.4220499611421596e-06, + "loss": 0.4937, + "step": 12870 + }, + { + "epoch": 0.5311133118758768, + "grad_norm": 2.773459209001624, + "learning_rate": 1.4218497567131079e-06, + "loss": 0.5405, + "step": 12871 + }, + { + "epoch": 0.5311545762152348, + "grad_norm": 2.8314016362752286, + "learning_rate": 1.421649553680015e-06, + "loss": 0.5502, + "step": 12872 + }, + { + "epoch": 0.5311958405545927, + "grad_norm": 8.100560481211103, + "learning_rate": 1.4214493520464565e-06, + "loss": 0.4982, + "step": 12873 + }, + { + "epoch": 0.5312371048939506, + "grad_norm": 5.017653880863078, + "learning_rate": 1.4212491518160097e-06, + "loss": 0.5669, + "step": 12874 + }, + { + "epoch": 0.5312783692333086, + "grad_norm": 8.155712829245706, + "learning_rate": 1.4210489529922495e-06, + "loss": 0.549, + "step": 12875 + }, + { + "epoch": 0.5313196335726665, + "grad_norm": 14.678514903877536, + "learning_rate": 1.420848755578753e-06, + "loss": 0.5383, + "step": 12876 + }, + { + "epoch": 0.5313608979120245, + "grad_norm": 2.6979505731911364, + "learning_rate": 1.4206485595790953e-06, + "loss": 0.5345, + "step": 12877 + }, + { + "epoch": 0.5314021622513824, + "grad_norm": 91.53558358333815, + "learning_rate": 1.4204483649968533e-06, + "loss": 0.5778, + "step": 12878 + }, + { + "epoch": 0.5314434265907403, + "grad_norm": 6.152746875413943, + "learning_rate": 1.4202481718356021e-06, + "loss": 0.5156, + "step": 12879 + }, + { + "epoch": 0.5314846909300982, + "grad_norm": 3.502001446709403, + "learning_rate": 1.4200479800989184e-06, + "loss": 0.6058, + "step": 12880 + }, + { + "epoch": 0.5315259552694561, + "grad_norm": 2.5777652540103033, + "learning_rate": 1.4198477897903774e-06, + "loss": 0.6144, + "step": 12881 + }, + { + "epoch": 0.531567219608814, + "grad_norm": 4.729098179617037, + "learning_rate": 1.419647600913556e-06, + "loss": 0.5642, + "step": 12882 + }, + { + "epoch": 0.531608483948172, + "grad_norm": 23.797422874685395, + "learning_rate": 1.4194474134720293e-06, + "loss": 0.467, + "step": 12883 + }, + { + "epoch": 0.5316497482875299, + "grad_norm": 3.5437491776265597, + "learning_rate": 1.419247227469373e-06, + "loss": 0.5158, + "step": 12884 + }, + { + "epoch": 0.5316910126268879, + "grad_norm": 4.405158701281467, + "learning_rate": 1.4190470429091633e-06, + "loss": 0.5363, + "step": 12885 + }, + { + "epoch": 0.5317322769662458, + "grad_norm": 2.998261656815935, + "learning_rate": 1.4188468597949757e-06, + "loss": 0.5291, + "step": 12886 + }, + { + "epoch": 0.5317735413056037, + "grad_norm": 5.690818889384691, + "learning_rate": 
1.4186466781303865e-06, + "loss": 0.5239, + "step": 12887 + }, + { + "epoch": 0.5318148056449616, + "grad_norm": 11.600051553619567, + "learning_rate": 1.418446497918971e-06, + "loss": 0.5697, + "step": 12888 + }, + { + "epoch": 0.5318560699843196, + "grad_norm": 3.4057335820332875, + "learning_rate": 1.4182463191643054e-06, + "loss": 0.5604, + "step": 12889 + }, + { + "epoch": 0.5318973343236775, + "grad_norm": 2.4626235019639138, + "learning_rate": 1.4180461418699645e-06, + "loss": 0.5397, + "step": 12890 + }, + { + "epoch": 0.5319385986630354, + "grad_norm": 4.0248509323066886, + "learning_rate": 1.417845966039525e-06, + "loss": 0.4868, + "step": 12891 + }, + { + "epoch": 0.5319798630023933, + "grad_norm": 10.32439200623679, + "learning_rate": 1.4176457916765616e-06, + "loss": 0.5547, + "step": 12892 + }, + { + "epoch": 0.5320211273417512, + "grad_norm": 3.7461890879473643, + "learning_rate": 1.4174456187846507e-06, + "loss": 0.5528, + "step": 12893 + }, + { + "epoch": 0.5320623916811091, + "grad_norm": 5.119995499958391, + "learning_rate": 1.4172454473673677e-06, + "loss": 0.5457, + "step": 12894 + }, + { + "epoch": 0.5321036560204672, + "grad_norm": 5.869341752221985, + "learning_rate": 1.4170452774282876e-06, + "loss": 0.5503, + "step": 12895 + }, + { + "epoch": 0.5321449203598251, + "grad_norm": 2.4760973651857587, + "learning_rate": 1.4168451089709872e-06, + "loss": 0.4946, + "step": 12896 + }, + { + "epoch": 0.532186184699183, + "grad_norm": 4.188186106518659, + "learning_rate": 1.4166449419990406e-06, + "loss": 0.557, + "step": 12897 + }, + { + "epoch": 0.5322274490385409, + "grad_norm": 5.324085182253526, + "learning_rate": 1.4164447765160238e-06, + "loss": 0.543, + "step": 12898 + }, + { + "epoch": 0.5322687133778988, + "grad_norm": 3.5894329421256246, + "learning_rate": 1.4162446125255125e-06, + "loss": 0.5408, + "step": 12899 + }, + { + "epoch": 0.5323099777172567, + "grad_norm": 1.7622380863479805, + "learning_rate": 1.4160444500310816e-06, + "loss": 0.5001, + "step": 12900 + }, + { + "epoch": 0.5323512420566147, + "grad_norm": 5.816375687289812, + "learning_rate": 1.4158442890363071e-06, + "loss": 0.4892, + "step": 12901 + }, + { + "epoch": 0.5323925063959726, + "grad_norm": 3.934146753135822, + "learning_rate": 1.415644129544764e-06, + "loss": 0.5348, + "step": 12902 + }, + { + "epoch": 0.5324337707353305, + "grad_norm": 2.587014705090322, + "learning_rate": 1.4154439715600279e-06, + "loss": 0.5149, + "step": 12903 + }, + { + "epoch": 0.5324750350746884, + "grad_norm": 11.327190062629676, + "learning_rate": 1.4152438150856738e-06, + "loss": 0.5301, + "step": 12904 + }, + { + "epoch": 0.5325162994140464, + "grad_norm": 3.638246692437565, + "learning_rate": 1.4150436601252774e-06, + "loss": 0.5257, + "step": 12905 + }, + { + "epoch": 0.5325575637534044, + "grad_norm": 1.8496937563343778, + "learning_rate": 1.4148435066824136e-06, + "loss": 0.5021, + "step": 12906 + }, + { + "epoch": 0.5325988280927623, + "grad_norm": 4.817605605967128, + "learning_rate": 1.4146433547606581e-06, + "loss": 0.594, + "step": 12907 + }, + { + "epoch": 0.5326400924321202, + "grad_norm": 2.5876195730403926, + "learning_rate": 1.4144432043635853e-06, + "loss": 0.4765, + "step": 12908 + }, + { + "epoch": 0.5326813567714781, + "grad_norm": 2.2610078053348093, + "learning_rate": 1.4142430554947718e-06, + "loss": 0.5017, + "step": 12909 + }, + { + "epoch": 0.532722621110836, + "grad_norm": 2.47556145530517, + "learning_rate": 1.4140429081577914e-06, + "loss": 0.5431, + "step": 12910 + }, + { + "epoch": 
0.5327638854501939, + "grad_norm": 3.701189210817995, + "learning_rate": 1.4138427623562197e-06, + "loss": 0.5763, + "step": 12911 + }, + { + "epoch": 0.5328051497895518, + "grad_norm": 3.307471127900297, + "learning_rate": 1.4136426180936316e-06, + "loss": 0.5374, + "step": 12912 + }, + { + "epoch": 0.5328464141289098, + "grad_norm": 4.800829607799595, + "learning_rate": 1.4134424753736024e-06, + "loss": 0.5158, + "step": 12913 + }, + { + "epoch": 0.5328876784682677, + "grad_norm": 2.3704579399901125, + "learning_rate": 1.413242334199707e-06, + "loss": 0.5182, + "step": 12914 + }, + { + "epoch": 0.5329289428076257, + "grad_norm": 2.6170455966536177, + "learning_rate": 1.4130421945755208e-06, + "loss": 0.523, + "step": 12915 + }, + { + "epoch": 0.5329702071469836, + "grad_norm": 2.7933763967315284, + "learning_rate": 1.4128420565046183e-06, + "loss": 0.5842, + "step": 12916 + }, + { + "epoch": 0.5330114714863415, + "grad_norm": 9.49662167616198, + "learning_rate": 1.412641919990575e-06, + "loss": 0.5195, + "step": 12917 + }, + { + "epoch": 0.5330527358256995, + "grad_norm": 2.8160414276766894, + "learning_rate": 1.4124417850369652e-06, + "loss": 0.5451, + "step": 12918 + }, + { + "epoch": 0.5330940001650574, + "grad_norm": 4.769859184078577, + "learning_rate": 1.4122416516473644e-06, + "loss": 0.5768, + "step": 12919 + }, + { + "epoch": 0.5331352645044153, + "grad_norm": 4.27524071932977, + "learning_rate": 1.4120415198253471e-06, + "loss": 0.5779, + "step": 12920 + }, + { + "epoch": 0.5331765288437732, + "grad_norm": 3.0136225621717645, + "learning_rate": 1.4118413895744885e-06, + "loss": 0.5184, + "step": 12921 + }, + { + "epoch": 0.5332177931831311, + "grad_norm": 3.3328494260257253, + "learning_rate": 1.4116412608983637e-06, + "loss": 0.5234, + "step": 12922 + }, + { + "epoch": 0.533259057522489, + "grad_norm": 3.0366197954086913, + "learning_rate": 1.4114411338005463e-06, + "loss": 0.5768, + "step": 12923 + }, + { + "epoch": 0.533300321861847, + "grad_norm": 2.3189803953637322, + "learning_rate": 1.4112410082846118e-06, + "loss": 0.5219, + "step": 12924 + }, + { + "epoch": 0.5333415862012049, + "grad_norm": 2.2806003885011625, + "learning_rate": 1.411040884354135e-06, + "loss": 0.5971, + "step": 12925 + }, + { + "epoch": 0.5333828505405629, + "grad_norm": 2.913558113125701, + "learning_rate": 1.4108407620126908e-06, + "loss": 0.5322, + "step": 12926 + }, + { + "epoch": 0.5334241148799208, + "grad_norm": 2.106052656749185, + "learning_rate": 1.4106406412638531e-06, + "loss": 0.4842, + "step": 12927 + }, + { + "epoch": 0.5334653792192787, + "grad_norm": 4.731401824920038, + "learning_rate": 1.4104405221111975e-06, + "loss": 0.4807, + "step": 12928 + }, + { + "epoch": 0.5335066435586366, + "grad_norm": 2.842334804878744, + "learning_rate": 1.4102404045582981e-06, + "loss": 0.4885, + "step": 12929 + }, + { + "epoch": 0.5335479078979946, + "grad_norm": 7.129935979179621, + "learning_rate": 1.4100402886087298e-06, + "loss": 0.5402, + "step": 12930 + }, + { + "epoch": 0.5335891722373525, + "grad_norm": 3.0159383929213894, + "learning_rate": 1.4098401742660667e-06, + "loss": 0.5862, + "step": 12931 + }, + { + "epoch": 0.5336304365767104, + "grad_norm": 2.630693928863661, + "learning_rate": 1.409640061533884e-06, + "loss": 0.556, + "step": 12932 + }, + { + "epoch": 0.5336717009160683, + "grad_norm": 2.5441322450531816, + "learning_rate": 1.4094399504157556e-06, + "loss": 0.4943, + "step": 12933 + }, + { + "epoch": 0.5337129652554262, + "grad_norm": 2.3316522259206285, + "learning_rate": 
1.409239840915256e-06, + "loss": 0.5783, + "step": 12934 + }, + { + "epoch": 0.5337542295947841, + "grad_norm": 2.737958692146083, + "learning_rate": 1.4090397330359608e-06, + "loss": 0.5025, + "step": 12935 + }, + { + "epoch": 0.5337954939341422, + "grad_norm": 2.780290988706852, + "learning_rate": 1.4088396267814429e-06, + "loss": 0.513, + "step": 12936 + }, + { + "epoch": 0.5338367582735001, + "grad_norm": 3.3428030585358943, + "learning_rate": 1.4086395221552772e-06, + "loss": 0.4904, + "step": 12937 + }, + { + "epoch": 0.533878022612858, + "grad_norm": 4.822808456854626, + "learning_rate": 1.4084394191610382e-06, + "loss": 0.5392, + "step": 12938 + }, + { + "epoch": 0.5339192869522159, + "grad_norm": 3.358049525605729, + "learning_rate": 1.4082393178023002e-06, + "loss": 0.5627, + "step": 12939 + }, + { + "epoch": 0.5339605512915738, + "grad_norm": 2.551005278767481, + "learning_rate": 1.4080392180826377e-06, + "loss": 0.5821, + "step": 12940 + }, + { + "epoch": 0.5340018156309317, + "grad_norm": 4.551629178005872, + "learning_rate": 1.4078391200056246e-06, + "loss": 0.4741, + "step": 12941 + }, + { + "epoch": 0.5340430799702897, + "grad_norm": 3.560926932739623, + "learning_rate": 1.4076390235748358e-06, + "loss": 0.5986, + "step": 12942 + }, + { + "epoch": 0.5340843443096476, + "grad_norm": 9.824285751211214, + "learning_rate": 1.4074389287938444e-06, + "loss": 0.5172, + "step": 12943 + }, + { + "epoch": 0.5341256086490055, + "grad_norm": 2.983722888817001, + "learning_rate": 1.407238835666226e-06, + "loss": 0.5265, + "step": 12944 + }, + { + "epoch": 0.5341668729883634, + "grad_norm": 1.63694914354782, + "learning_rate": 1.4070387441955534e-06, + "loss": 0.4953, + "step": 12945 + }, + { + "epoch": 0.5342081373277214, + "grad_norm": 2.3742246425516447, + "learning_rate": 1.4068386543854017e-06, + "loss": 0.474, + "step": 12946 + }, + { + "epoch": 0.5342494016670793, + "grad_norm": 5.283052877771086, + "learning_rate": 1.4066385662393448e-06, + "loss": 0.5449, + "step": 12947 + }, + { + "epoch": 0.5342906660064373, + "grad_norm": 3.1324694306531478, + "learning_rate": 1.406438479760957e-06, + "loss": 0.507, + "step": 12948 + }, + { + "epoch": 0.5343319303457952, + "grad_norm": 7.7740958281184245, + "learning_rate": 1.4062383949538117e-06, + "loss": 0.5169, + "step": 12949 + }, + { + "epoch": 0.5343731946851531, + "grad_norm": 2.984643547155147, + "learning_rate": 1.4060383118214832e-06, + "loss": 0.5178, + "step": 12950 + }, + { + "epoch": 0.534414459024511, + "grad_norm": 2.587061288006458, + "learning_rate": 1.405838230367545e-06, + "loss": 0.5494, + "step": 12951 + }, + { + "epoch": 0.5344557233638689, + "grad_norm": 2.255081538788199, + "learning_rate": 1.4056381505955722e-06, + "loss": 0.4994, + "step": 12952 + }, + { + "epoch": 0.5344969877032268, + "grad_norm": 5.2849098400136345, + "learning_rate": 1.4054380725091377e-06, + "loss": 0.5086, + "step": 12953 + }, + { + "epoch": 0.5345382520425848, + "grad_norm": 4.883859188723775, + "learning_rate": 1.405237996111816e-06, + "loss": 0.4996, + "step": 12954 + }, + { + "epoch": 0.5345795163819427, + "grad_norm": 13.804798848916121, + "learning_rate": 1.4050379214071805e-06, + "loss": 0.5036, + "step": 12955 + }, + { + "epoch": 0.5346207807213007, + "grad_norm": 6.559780907514689, + "learning_rate": 1.4048378483988055e-06, + "loss": 0.5068, + "step": 12956 + }, + { + "epoch": 0.5346620450606586, + "grad_norm": 5.601539584551587, + "learning_rate": 1.4046377770902645e-06, + "loss": 0.518, + "step": 12957 + }, + { + "epoch": 
0.5347033094000165, + "grad_norm": 2.9520756742076975, + "learning_rate": 1.4044377074851315e-06, + "loss": 0.5637, + "step": 12958 + }, + { + "epoch": 0.5347445737393745, + "grad_norm": 3.0015111391732625, + "learning_rate": 1.4042376395869797e-06, + "loss": 0.5218, + "step": 12959 + }, + { + "epoch": 0.5347858380787324, + "grad_norm": 3.0672416941602707, + "learning_rate": 1.4040375733993837e-06, + "loss": 0.5238, + "step": 12960 + }, + { + "epoch": 0.5348271024180903, + "grad_norm": 3.419908719062649, + "learning_rate": 1.403837508925917e-06, + "loss": 0.526, + "step": 12961 + }, + { + "epoch": 0.5348683667574482, + "grad_norm": 2.499676797073006, + "learning_rate": 1.4036374461701523e-06, + "loss": 0.5768, + "step": 12962 + }, + { + "epoch": 0.5349096310968061, + "grad_norm": 3.455734468488178, + "learning_rate": 1.4034373851356641e-06, + "loss": 0.5517, + "step": 12963 + }, + { + "epoch": 0.534950895436164, + "grad_norm": 2.452847607077014, + "learning_rate": 1.4032373258260256e-06, + "loss": 0.4885, + "step": 12964 + }, + { + "epoch": 0.534992159775522, + "grad_norm": 3.7831992296649335, + "learning_rate": 1.403037268244811e-06, + "loss": 0.545, + "step": 12965 + }, + { + "epoch": 0.53503342411488, + "grad_norm": 3.2917585240227933, + "learning_rate": 1.4028372123955928e-06, + "loss": 0.4821, + "step": 12966 + }, + { + "epoch": 0.5350746884542379, + "grad_norm": 2.5635466807175447, + "learning_rate": 1.4026371582819451e-06, + "loss": 0.455, + "step": 12967 + }, + { + "epoch": 0.5351159527935958, + "grad_norm": 2.781455282091552, + "learning_rate": 1.4024371059074414e-06, + "loss": 0.4945, + "step": 12968 + }, + { + "epoch": 0.5351572171329537, + "grad_norm": 9.428521738065168, + "learning_rate": 1.4022370552756552e-06, + "loss": 0.4832, + "step": 12969 + }, + { + "epoch": 0.5351984814723116, + "grad_norm": 2.5803584116147777, + "learning_rate": 1.4020370063901595e-06, + "loss": 0.4709, + "step": 12970 + }, + { + "epoch": 0.5352397458116696, + "grad_norm": 3.500348182132783, + "learning_rate": 1.4018369592545283e-06, + "loss": 0.5003, + "step": 12971 + }, + { + "epoch": 0.5352810101510275, + "grad_norm": 2.336348589265295, + "learning_rate": 1.4016369138723345e-06, + "loss": 0.5506, + "step": 12972 + }, + { + "epoch": 0.5353222744903854, + "grad_norm": 2.063255600919884, + "learning_rate": 1.401436870247151e-06, + "loss": 0.5142, + "step": 12973 + }, + { + "epoch": 0.5353635388297433, + "grad_norm": 2.8402151275290577, + "learning_rate": 1.4012368283825525e-06, + "loss": 0.5169, + "step": 12974 + }, + { + "epoch": 0.5354048031691012, + "grad_norm": 3.2768103377032824, + "learning_rate": 1.401036788282111e-06, + "loss": 0.4987, + "step": 12975 + }, + { + "epoch": 0.5354460675084592, + "grad_norm": 2.3792510558035636, + "learning_rate": 1.4008367499493996e-06, + "loss": 0.5077, + "step": 12976 + }, + { + "epoch": 0.5354873318478172, + "grad_norm": 2.2405769314769137, + "learning_rate": 1.4006367133879922e-06, + "loss": 0.4992, + "step": 12977 + }, + { + "epoch": 0.5355285961871751, + "grad_norm": 5.450650531344429, + "learning_rate": 1.4004366786014614e-06, + "loss": 0.4827, + "step": 12978 + }, + { + "epoch": 0.535569860526533, + "grad_norm": 2.772361291116445, + "learning_rate": 1.4002366455933809e-06, + "loss": 0.4708, + "step": 12979 + }, + { + "epoch": 0.5356111248658909, + "grad_norm": 3.1567364923485894, + "learning_rate": 1.400036614367323e-06, + "loss": 0.5819, + "step": 12980 + }, + { + "epoch": 0.5356523892052488, + "grad_norm": 2.1690930988019645, + "learning_rate": 
1.3998365849268616e-06, + "loss": 0.5123, + "step": 12981 + }, + { + "epoch": 0.5356936535446067, + "grad_norm": 4.9387142095785, + "learning_rate": 1.3996365572755693e-06, + "loss": 0.5811, + "step": 12982 + }, + { + "epoch": 0.5357349178839647, + "grad_norm": 2.6208219839274918, + "learning_rate": 1.3994365314170193e-06, + "loss": 0.5328, + "step": 12983 + }, + { + "epoch": 0.5357761822233226, + "grad_norm": 3.1756614855251764, + "learning_rate": 1.399236507354784e-06, + "loss": 0.5666, + "step": 12984 + }, + { + "epoch": 0.5358174465626805, + "grad_norm": 3.7199067643048362, + "learning_rate": 1.3990364850924373e-06, + "loss": 0.5322, + "step": 12985 + }, + { + "epoch": 0.5358587109020384, + "grad_norm": 2.2494531774810564, + "learning_rate": 1.398836464633551e-06, + "loss": 0.4996, + "step": 12986 + }, + { + "epoch": 0.5358999752413964, + "grad_norm": 5.592334887145262, + "learning_rate": 1.398636445981699e-06, + "loss": 0.5361, + "step": 12987 + }, + { + "epoch": 0.5359412395807543, + "grad_norm": 3.635851499142369, + "learning_rate": 1.3984364291404539e-06, + "loss": 0.5207, + "step": 12988 + }, + { + "epoch": 0.5359825039201123, + "grad_norm": 2.3249448988575483, + "learning_rate": 1.398236414113388e-06, + "loss": 0.5555, + "step": 12989 + }, + { + "epoch": 0.5360237682594702, + "grad_norm": 2.927251050384304, + "learning_rate": 1.398036400904074e-06, + "loss": 0.5853, + "step": 12990 + }, + { + "epoch": 0.5360650325988281, + "grad_norm": 3.159380789867953, + "learning_rate": 1.3978363895160853e-06, + "loss": 0.4978, + "step": 12991 + }, + { + "epoch": 0.536106296938186, + "grad_norm": 2.374930184366727, + "learning_rate": 1.3976363799529937e-06, + "loss": 0.4987, + "step": 12992 + }, + { + "epoch": 0.5361475612775439, + "grad_norm": 2.516457442509994, + "learning_rate": 1.397436372218373e-06, + "loss": 0.4826, + "step": 12993 + }, + { + "epoch": 0.5361888256169018, + "grad_norm": 8.197054721273865, + "learning_rate": 1.3972363663157948e-06, + "loss": 0.591, + "step": 12994 + }, + { + "epoch": 0.5362300899562598, + "grad_norm": 2.5464706010957032, + "learning_rate": 1.3970363622488326e-06, + "loss": 0.4567, + "step": 12995 + }, + { + "epoch": 0.5362713542956177, + "grad_norm": 2.8913638683991287, + "learning_rate": 1.3968363600210582e-06, + "loss": 0.5075, + "step": 12996 + }, + { + "epoch": 0.5363126186349757, + "grad_norm": 2.5974312067682312, + "learning_rate": 1.3966363596360446e-06, + "loss": 0.5036, + "step": 12997 + }, + { + "epoch": 0.5363538829743336, + "grad_norm": 5.9062282092684155, + "learning_rate": 1.396436361097364e-06, + "loss": 0.5878, + "step": 12998 + }, + { + "epoch": 0.5363951473136915, + "grad_norm": 4.2266747631989405, + "learning_rate": 1.3962363644085895e-06, + "loss": 0.5282, + "step": 12999 + }, + { + "epoch": 0.5364364116530494, + "grad_norm": 5.03232810812524, + "learning_rate": 1.3960363695732926e-06, + "loss": 0.5224, + "step": 13000 + }, + { + "epoch": 0.5364776759924074, + "grad_norm": 3.0688631247271756, + "learning_rate": 1.395836376595047e-06, + "loss": 0.5169, + "step": 13001 + }, + { + "epoch": 0.5365189403317653, + "grad_norm": 2.1594331298941216, + "learning_rate": 1.395636385477424e-06, + "loss": 0.5137, + "step": 13002 + }, + { + "epoch": 0.5365602046711232, + "grad_norm": 5.05083786646614, + "learning_rate": 1.3954363962239959e-06, + "loss": 0.5407, + "step": 13003 + }, + { + "epoch": 0.5366014690104811, + "grad_norm": 2.5978356248322085, + "learning_rate": 1.3952364088383355e-06, + "loss": 0.5447, + "step": 13004 + }, + { + "epoch": 
0.536642733349839, + "grad_norm": 4.096228847250621, + "learning_rate": 1.3950364233240147e-06, + "loss": 0.5152, + "step": 13005 + }, + { + "epoch": 0.536683997689197, + "grad_norm": 4.9089784679073, + "learning_rate": 1.3948364396846065e-06, + "loss": 0.5027, + "step": 13006 + }, + { + "epoch": 0.536725262028555, + "grad_norm": 2.821850286039936, + "learning_rate": 1.394636457923682e-06, + "loss": 0.541, + "step": 13007 + }, + { + "epoch": 0.5367665263679129, + "grad_norm": 2.063445399071492, + "learning_rate": 1.3944364780448143e-06, + "loss": 0.5299, + "step": 13008 + }, + { + "epoch": 0.5368077907072708, + "grad_norm": 2.485462268645334, + "learning_rate": 1.394236500051575e-06, + "loss": 0.5081, + "step": 13009 + }, + { + "epoch": 0.5368490550466287, + "grad_norm": 2.8256964513102054, + "learning_rate": 1.3940365239475365e-06, + "loss": 0.5459, + "step": 13010 + }, + { + "epoch": 0.5368903193859866, + "grad_norm": 4.040174283268366, + "learning_rate": 1.3938365497362707e-06, + "loss": 0.455, + "step": 13011 + }, + { + "epoch": 0.5369315837253446, + "grad_norm": 2.7047781820073684, + "learning_rate": 1.3936365774213494e-06, + "loss": 0.4715, + "step": 13012 + }, + { + "epoch": 0.5369728480647025, + "grad_norm": 14.05128366175998, + "learning_rate": 1.3934366070063455e-06, + "loss": 0.5353, + "step": 13013 + }, + { + "epoch": 0.5370141124040604, + "grad_norm": 5.11315422014908, + "learning_rate": 1.3932366384948307e-06, + "loss": 0.5854, + "step": 13014 + }, + { + "epoch": 0.5370553767434183, + "grad_norm": 9.023792452157855, + "learning_rate": 1.3930366718903756e-06, + "loss": 0.5511, + "step": 13015 + }, + { + "epoch": 0.5370966410827762, + "grad_norm": 7.504609638742686, + "learning_rate": 1.3928367071965539e-06, + "loss": 0.5363, + "step": 13016 + }, + { + "epoch": 0.5371379054221342, + "grad_norm": 2.107401925571607, + "learning_rate": 1.392636744416936e-06, + "loss": 0.5215, + "step": 13017 + }, + { + "epoch": 0.5371791697614922, + "grad_norm": 2.684547884759325, + "learning_rate": 1.392436783555095e-06, + "loss": 0.5445, + "step": 13018 + }, + { + "epoch": 0.5372204341008501, + "grad_norm": 2.7208559560395824, + "learning_rate": 1.3922368246146015e-06, + "loss": 0.453, + "step": 13019 + }, + { + "epoch": 0.537261698440208, + "grad_norm": 8.288809094854797, + "learning_rate": 1.3920368675990287e-06, + "loss": 0.5722, + "step": 13020 + }, + { + "epoch": 0.5373029627795659, + "grad_norm": 8.687162755439138, + "learning_rate": 1.3918369125119467e-06, + "loss": 0.5557, + "step": 13021 + }, + { + "epoch": 0.5373442271189238, + "grad_norm": 7.415308419567866, + "learning_rate": 1.3916369593569285e-06, + "loss": 0.5064, + "step": 13022 + }, + { + "epoch": 0.5373854914582817, + "grad_norm": 2.609536236922153, + "learning_rate": 1.3914370081375452e-06, + "loss": 0.5192, + "step": 13023 + }, + { + "epoch": 0.5374267557976397, + "grad_norm": 14.0912465754767, + "learning_rate": 1.3912370588573685e-06, + "loss": 0.556, + "step": 13024 + }, + { + "epoch": 0.5374680201369976, + "grad_norm": 1.828425464470428, + "learning_rate": 1.39103711151997e-06, + "loss": 0.4775, + "step": 13025 + }, + { + "epoch": 0.5375092844763555, + "grad_norm": 3.639886779282603, + "learning_rate": 1.3908371661289214e-06, + "loss": 0.5259, + "step": 13026 + }, + { + "epoch": 0.5375505488157135, + "grad_norm": 2.2609224679154387, + "learning_rate": 1.3906372226877946e-06, + "loss": 0.5207, + "step": 13027 + }, + { + "epoch": 0.5375918131550714, + "grad_norm": 2.3348541575323343, + "learning_rate": 1.39043728120016e-06, 
+ "loss": 0.5354, + "step": 13028 + }, + { + "epoch": 0.5376330774944293, + "grad_norm": 2.7373906517814413, + "learning_rate": 1.3902373416695894e-06, + "loss": 0.5305, + "step": 13029 + }, + { + "epoch": 0.5376743418337873, + "grad_norm": 5.521339739603816, + "learning_rate": 1.3900374040996549e-06, + "loss": 0.5241, + "step": 13030 + }, + { + "epoch": 0.5377156061731452, + "grad_norm": 3.1395467299312183, + "learning_rate": 1.389837468493927e-06, + "loss": 0.5481, + "step": 13031 + }, + { + "epoch": 0.5377568705125031, + "grad_norm": 7.371389479756208, + "learning_rate": 1.3896375348559778e-06, + "loss": 0.5367, + "step": 13032 + }, + { + "epoch": 0.537798134851861, + "grad_norm": 3.4966709152013915, + "learning_rate": 1.3894376031893783e-06, + "loss": 0.5599, + "step": 13033 + }, + { + "epoch": 0.5378393991912189, + "grad_norm": 3.078700823028064, + "learning_rate": 1.3892376734976998e-06, + "loss": 0.5432, + "step": 13034 + }, + { + "epoch": 0.5378806635305768, + "grad_norm": 2.335827910586659, + "learning_rate": 1.3890377457845132e-06, + "loss": 0.5394, + "step": 13035 + }, + { + "epoch": 0.5379219278699348, + "grad_norm": 2.7738650339124815, + "learning_rate": 1.3888378200533905e-06, + "loss": 0.494, + "step": 13036 + }, + { + "epoch": 0.5379631922092928, + "grad_norm": 2.76723776996048, + "learning_rate": 1.3886378963079022e-06, + "loss": 0.535, + "step": 13037 + }, + { + "epoch": 0.5380044565486507, + "grad_norm": 4.690345507943562, + "learning_rate": 1.3884379745516199e-06, + "loss": 0.5372, + "step": 13038 + }, + { + "epoch": 0.5380457208880086, + "grad_norm": 3.682013843301543, + "learning_rate": 1.3882380547881138e-06, + "loss": 0.5281, + "step": 13039 + }, + { + "epoch": 0.5380869852273665, + "grad_norm": 2.810382460308674, + "learning_rate": 1.3880381370209567e-06, + "loss": 0.5468, + "step": 13040 + }, + { + "epoch": 0.5381282495667244, + "grad_norm": 12.929955905196884, + "learning_rate": 1.387838221253718e-06, + "loss": 0.5699, + "step": 13041 + }, + { + "epoch": 0.5381695139060824, + "grad_norm": 3.1168064423443944, + "learning_rate": 1.387638307489969e-06, + "loss": 0.5698, + "step": 13042 + }, + { + "epoch": 0.5382107782454403, + "grad_norm": 5.493766851784384, + "learning_rate": 1.3874383957332814e-06, + "loss": 0.5514, + "step": 13043 + }, + { + "epoch": 0.5382520425847982, + "grad_norm": 4.134458831006872, + "learning_rate": 1.3872384859872251e-06, + "loss": 0.5829, + "step": 13044 + }, + { + "epoch": 0.5382933069241561, + "grad_norm": 2.233375818288789, + "learning_rate": 1.387038578255372e-06, + "loss": 0.5199, + "step": 13045 + }, + { + "epoch": 0.538334571263514, + "grad_norm": 6.351377862796032, + "learning_rate": 1.386838672541292e-06, + "loss": 0.5793, + "step": 13046 + }, + { + "epoch": 0.5383758356028719, + "grad_norm": 2.223331207384281, + "learning_rate": 1.3866387688485568e-06, + "loss": 0.5242, + "step": 13047 + }, + { + "epoch": 0.53841709994223, + "grad_norm": 4.537059871411781, + "learning_rate": 1.3864388671807363e-06, + "loss": 0.5607, + "step": 13048 + }, + { + "epoch": 0.5384583642815879, + "grad_norm": 3.86248504798263, + "learning_rate": 1.3862389675414025e-06, + "loss": 0.519, + "step": 13049 + }, + { + "epoch": 0.5384996286209458, + "grad_norm": 2.9007208002231777, + "learning_rate": 1.386039069934125e-06, + "loss": 0.5142, + "step": 13050 + }, + { + "epoch": 0.5385408929603037, + "grad_norm": 2.529142268941726, + "learning_rate": 1.3858391743624744e-06, + "loss": 0.5093, + "step": 13051 + }, + { + "epoch": 0.5385821572996616, + "grad_norm": 
2.9289055694760906, + "learning_rate": 1.385639280830022e-06, + "loss": 0.5119, + "step": 13052 + }, + { + "epoch": 0.5386234216390196, + "grad_norm": 2.2978615130744027, + "learning_rate": 1.3854393893403388e-06, + "loss": 0.5635, + "step": 13053 + }, + { + "epoch": 0.5386646859783775, + "grad_norm": 5.053469688615509, + "learning_rate": 1.3852394998969938e-06, + "loss": 0.5171, + "step": 13054 + }, + { + "epoch": 0.5387059503177354, + "grad_norm": 4.1753951462582055, + "learning_rate": 1.3850396125035587e-06, + "loss": 0.5466, + "step": 13055 + }, + { + "epoch": 0.5387472146570933, + "grad_norm": 4.089143706445877, + "learning_rate": 1.3848397271636034e-06, + "loss": 0.4861, + "step": 13056 + }, + { + "epoch": 0.5387884789964512, + "grad_norm": 2.4191631445617516, + "learning_rate": 1.3846398438806992e-06, + "loss": 0.5527, + "step": 13057 + }, + { + "epoch": 0.5388297433358092, + "grad_norm": 2.9402904554329674, + "learning_rate": 1.3844399626584154e-06, + "loss": 0.5561, + "step": 13058 + }, + { + "epoch": 0.5388710076751672, + "grad_norm": 3.0744148263643662, + "learning_rate": 1.3842400835003234e-06, + "loss": 0.5472, + "step": 13059 + }, + { + "epoch": 0.5389122720145251, + "grad_norm": 4.191916422743054, + "learning_rate": 1.384040206409993e-06, + "loss": 0.5572, + "step": 13060 + }, + { + "epoch": 0.538953536353883, + "grad_norm": 2.483237962338418, + "learning_rate": 1.3838403313909945e-06, + "loss": 0.5154, + "step": 13061 + }, + { + "epoch": 0.5389948006932409, + "grad_norm": 2.8051004542858187, + "learning_rate": 1.3836404584468982e-06, + "loss": 0.553, + "step": 13062 + }, + { + "epoch": 0.5390360650325988, + "grad_norm": 7.806740814458906, + "learning_rate": 1.3834405875812744e-06, + "loss": 0.5431, + "step": 13063 + }, + { + "epoch": 0.5390773293719567, + "grad_norm": 4.204002874108082, + "learning_rate": 1.3832407187976933e-06, + "loss": 0.5224, + "step": 13064 + }, + { + "epoch": 0.5391185937113147, + "grad_norm": 3.596697189971914, + "learning_rate": 1.3830408520997253e-06, + "loss": 0.5579, + "step": 13065 + }, + { + "epoch": 0.5391598580506726, + "grad_norm": 2.155865013126698, + "learning_rate": 1.38284098749094e-06, + "loss": 0.5114, + "step": 13066 + }, + { + "epoch": 0.5392011223900305, + "grad_norm": 3.827253485485953, + "learning_rate": 1.3826411249749087e-06, + "loss": 0.5363, + "step": 13067 + }, + { + "epoch": 0.5392423867293885, + "grad_norm": 2.8286598244294328, + "learning_rate": 1.3824412645551995e-06, + "loss": 0.588, + "step": 13068 + }, + { + "epoch": 0.5392836510687464, + "grad_norm": 4.418575828384683, + "learning_rate": 1.3822414062353836e-06, + "loss": 0.553, + "step": 13069 + }, + { + "epoch": 0.5393249154081043, + "grad_norm": 4.189948103902928, + "learning_rate": 1.3820415500190305e-06, + "loss": 0.5694, + "step": 13070 + }, + { + "epoch": 0.5393661797474623, + "grad_norm": 3.1066324205189826, + "learning_rate": 1.3818416959097108e-06, + "loss": 0.5224, + "step": 13071 + }, + { + "epoch": 0.5394074440868202, + "grad_norm": 2.535312773641738, + "learning_rate": 1.3816418439109938e-06, + "loss": 0.5243, + "step": 13072 + }, + { + "epoch": 0.5394487084261781, + "grad_norm": 2.963137458685575, + "learning_rate": 1.3814419940264498e-06, + "loss": 0.5364, + "step": 13073 + }, + { + "epoch": 0.539489972765536, + "grad_norm": 3.609388357914424, + "learning_rate": 1.3812421462596478e-06, + "loss": 0.5761, + "step": 13074 + }, + { + "epoch": 0.5395312371048939, + "grad_norm": 2.8309401294277547, + "learning_rate": 1.3810423006141585e-06, + "loss": 0.5076, 
+ "step": 13075 + }, + { + "epoch": 0.5395725014442518, + "grad_norm": 1.7485773365272679, + "learning_rate": 1.3808424570935513e-06, + "loss": 0.437, + "step": 13076 + }, + { + "epoch": 0.5396137657836098, + "grad_norm": 9.6266474697123, + "learning_rate": 1.380642615701396e-06, + "loss": 0.512, + "step": 13077 + }, + { + "epoch": 0.5396550301229678, + "grad_norm": 6.447980004173061, + "learning_rate": 1.3804427764412621e-06, + "loss": 0.5562, + "step": 13078 + }, + { + "epoch": 0.5396962944623257, + "grad_norm": 2.4279003877160394, + "learning_rate": 1.3802429393167193e-06, + "loss": 0.543, + "step": 13079 + }, + { + "epoch": 0.5397375588016836, + "grad_norm": 3.765593081506188, + "learning_rate": 1.3800431043313377e-06, + "loss": 0.4634, + "step": 13080 + }, + { + "epoch": 0.5397788231410415, + "grad_norm": 1.9015223613421388, + "learning_rate": 1.3798432714886856e-06, + "loss": 0.5333, + "step": 13081 + }, + { + "epoch": 0.5398200874803994, + "grad_norm": 13.96474068434932, + "learning_rate": 1.3796434407923336e-06, + "loss": 0.5163, + "step": 13082 + }, + { + "epoch": 0.5398613518197574, + "grad_norm": 3.199473187705594, + "learning_rate": 1.3794436122458506e-06, + "loss": 0.5255, + "step": 13083 + }, + { + "epoch": 0.5399026161591153, + "grad_norm": 4.004661666592632, + "learning_rate": 1.3792437858528065e-06, + "loss": 0.5368, + "step": 13084 + }, + { + "epoch": 0.5399438804984732, + "grad_norm": 2.308826483161988, + "learning_rate": 1.3790439616167702e-06, + "loss": 0.591, + "step": 13085 + }, + { + "epoch": 0.5399851448378311, + "grad_norm": 2.3948392826965086, + "learning_rate": 1.3788441395413116e-06, + "loss": 0.5588, + "step": 13086 + }, + { + "epoch": 0.540026409177189, + "grad_norm": 4.04908441201951, + "learning_rate": 1.3786443196299993e-06, + "loss": 0.5201, + "step": 13087 + }, + { + "epoch": 0.540067673516547, + "grad_norm": 2.626342440785012, + "learning_rate": 1.3784445018864036e-06, + "loss": 0.4703, + "step": 13088 + }, + { + "epoch": 0.540108937855905, + "grad_norm": 2.267034561821238, + "learning_rate": 1.3782446863140933e-06, + "loss": 0.4988, + "step": 13089 + }, + { + "epoch": 0.5401502021952629, + "grad_norm": 2.1792573823007384, + "learning_rate": 1.3780448729166367e-06, + "loss": 0.5117, + "step": 13090 + }, + { + "epoch": 0.5401914665346208, + "grad_norm": 2.8482823952145204, + "learning_rate": 1.3778450616976045e-06, + "loss": 0.5608, + "step": 13091 + }, + { + "epoch": 0.5402327308739787, + "grad_norm": 2.7139559836830154, + "learning_rate": 1.3776452526605646e-06, + "loss": 0.4979, + "step": 13092 + }, + { + "epoch": 0.5402739952133366, + "grad_norm": 5.723579347944782, + "learning_rate": 1.377445445809087e-06, + "loss": 0.5547, + "step": 13093 + }, + { + "epoch": 0.5403152595526945, + "grad_norm": 3.405422868180296, + "learning_rate": 1.3772456411467404e-06, + "loss": 0.4925, + "step": 13094 + }, + { + "epoch": 0.5403565238920525, + "grad_norm": 4.098347838920374, + "learning_rate": 1.377045838677093e-06, + "loss": 0.4916, + "step": 13095 + }, + { + "epoch": 0.5403977882314104, + "grad_norm": 3.2511936096136074, + "learning_rate": 1.3768460384037152e-06, + "loss": 0.4728, + "step": 13096 + }, + { + "epoch": 0.5404390525707683, + "grad_norm": 5.134199247064715, + "learning_rate": 1.3766462403301748e-06, + "loss": 0.5184, + "step": 13097 + }, + { + "epoch": 0.5404803169101263, + "grad_norm": 2.514914380479764, + "learning_rate": 1.3764464444600414e-06, + "loss": 0.5272, + "step": 13098 + }, + { + "epoch": 0.5405215812494842, + "grad_norm": 
3.4043281819791553, + "learning_rate": 1.3762466507968834e-06, + "loss": 0.4852, + "step": 13099 + }, + { + "epoch": 0.5405628455888422, + "grad_norm": 2.4676334499673565, + "learning_rate": 1.3760468593442698e-06, + "loss": 0.4937, + "step": 13100 + }, + { + "epoch": 0.5406041099282001, + "grad_norm": 2.49176215731172, + "learning_rate": 1.3758470701057693e-06, + "loss": 0.5274, + "step": 13101 + }, + { + "epoch": 0.540645374267558, + "grad_norm": 2.5660697117004427, + "learning_rate": 1.3756472830849512e-06, + "loss": 0.5017, + "step": 13102 + }, + { + "epoch": 0.5406866386069159, + "grad_norm": 3.057013023908616, + "learning_rate": 1.3754474982853831e-06, + "loss": 0.5306, + "step": 13103 + }, + { + "epoch": 0.5407279029462738, + "grad_norm": 15.133292404393265, + "learning_rate": 1.3752477157106345e-06, + "loss": 0.5228, + "step": 13104 + }, + { + "epoch": 0.5407691672856317, + "grad_norm": 2.9959911314384504, + "learning_rate": 1.3750479353642738e-06, + "loss": 0.5248, + "step": 13105 + }, + { + "epoch": 0.5408104316249897, + "grad_norm": 2.0720637354694076, + "learning_rate": 1.3748481572498703e-06, + "loss": 0.5377, + "step": 13106 + }, + { + "epoch": 0.5408516959643476, + "grad_norm": 6.431556341478983, + "learning_rate": 1.374648381370991e-06, + "loss": 0.5563, + "step": 13107 + }, + { + "epoch": 0.5408929603037055, + "grad_norm": 2.4909322545114843, + "learning_rate": 1.3744486077312054e-06, + "loss": 0.5096, + "step": 13108 + }, + { + "epoch": 0.5409342246430635, + "grad_norm": 2.296444606345938, + "learning_rate": 1.3742488363340813e-06, + "loss": 0.5096, + "step": 13109 + }, + { + "epoch": 0.5409754889824214, + "grad_norm": 2.323116221749257, + "learning_rate": 1.374049067183188e-06, + "loss": 0.5228, + "step": 13110 + }, + { + "epoch": 0.5410167533217793, + "grad_norm": 4.120328657424868, + "learning_rate": 1.373849300282093e-06, + "loss": 0.5031, + "step": 13111 + }, + { + "epoch": 0.5410580176611373, + "grad_norm": 5.436076822894951, + "learning_rate": 1.3736495356343657e-06, + "loss": 0.5396, + "step": 13112 + }, + { + "epoch": 0.5410992820004952, + "grad_norm": 2.517568754076707, + "learning_rate": 1.373449773243573e-06, + "loss": 0.5301, + "step": 13113 + }, + { + "epoch": 0.5411405463398531, + "grad_norm": 12.40154465577695, + "learning_rate": 1.3732500131132848e-06, + "loss": 0.4957, + "step": 13114 + }, + { + "epoch": 0.541181810679211, + "grad_norm": 1.9357093667022711, + "learning_rate": 1.3730502552470678e-06, + "loss": 0.4637, + "step": 13115 + }, + { + "epoch": 0.5412230750185689, + "grad_norm": 3.7853376155667506, + "learning_rate": 1.372850499648491e-06, + "loss": 0.5378, + "step": 13116 + }, + { + "epoch": 0.5412643393579268, + "grad_norm": 8.566806323145961, + "learning_rate": 1.3726507463211223e-06, + "loss": 0.4963, + "step": 13117 + }, + { + "epoch": 0.5413056036972848, + "grad_norm": 3.3088376057142224, + "learning_rate": 1.3724509952685302e-06, + "loss": 0.4573, + "step": 13118 + }, + { + "epoch": 0.5413468680366428, + "grad_norm": 3.856297571100128, + "learning_rate": 1.3722512464942826e-06, + "loss": 0.5179, + "step": 13119 + }, + { + "epoch": 0.5413881323760007, + "grad_norm": 3.518358534533791, + "learning_rate": 1.3720515000019467e-06, + "loss": 0.4659, + "step": 13120 + }, + { + "epoch": 0.5414293967153586, + "grad_norm": 2.792721373454794, + "learning_rate": 1.3718517557950916e-06, + "loss": 0.5221, + "step": 13121 + }, + { + "epoch": 0.5414706610547165, + "grad_norm": 5.577758200748736, + "learning_rate": 1.371652013877284e-06, + "loss": 0.5142, 
+ "step": 13122 + }, + { + "epoch": 0.5415119253940744, + "grad_norm": 2.8819152246830027, + "learning_rate": 1.371452274252093e-06, + "loss": 0.4554, + "step": 13123 + }, + { + "epoch": 0.5415531897334324, + "grad_norm": 12.343292889026944, + "learning_rate": 1.3712525369230856e-06, + "loss": 0.5231, + "step": 13124 + }, + { + "epoch": 0.5415944540727903, + "grad_norm": 2.899870116838799, + "learning_rate": 1.3710528018938305e-06, + "loss": 0.5358, + "step": 13125 + }, + { + "epoch": 0.5416357184121482, + "grad_norm": 2.7738063539773887, + "learning_rate": 1.3708530691678944e-06, + "loss": 0.5212, + "step": 13126 + }, + { + "epoch": 0.5416769827515061, + "grad_norm": 3.908958212821604, + "learning_rate": 1.370653338748846e-06, + "loss": 0.4695, + "step": 13127 + }, + { + "epoch": 0.541718247090864, + "grad_norm": 4.005318804100379, + "learning_rate": 1.3704536106402524e-06, + "loss": 0.5245, + "step": 13128 + }, + { + "epoch": 0.541759511430222, + "grad_norm": 4.252226841079576, + "learning_rate": 1.3702538848456811e-06, + "loss": 0.5518, + "step": 13129 + }, + { + "epoch": 0.54180077576958, + "grad_norm": 5.646165672792739, + "learning_rate": 1.3700541613687004e-06, + "loss": 0.5639, + "step": 13130 + }, + { + "epoch": 0.5418420401089379, + "grad_norm": 5.825925860574211, + "learning_rate": 1.3698544402128773e-06, + "loss": 0.5146, + "step": 13131 + }, + { + "epoch": 0.5418833044482958, + "grad_norm": 2.786198372850497, + "learning_rate": 1.3696547213817797e-06, + "loss": 0.5578, + "step": 13132 + }, + { + "epoch": 0.5419245687876537, + "grad_norm": 3.747969217660083, + "learning_rate": 1.3694550048789748e-06, + "loss": 0.5384, + "step": 13133 + }, + { + "epoch": 0.5419658331270116, + "grad_norm": 3.3814997471155515, + "learning_rate": 1.3692552907080297e-06, + "loss": 0.5506, + "step": 13134 + }, + { + "epoch": 0.5420070974663695, + "grad_norm": 5.181261628052635, + "learning_rate": 1.3690555788725125e-06, + "loss": 0.5551, + "step": 13135 + }, + { + "epoch": 0.5420483618057275, + "grad_norm": 5.926161059270126, + "learning_rate": 1.3688558693759901e-06, + "loss": 0.4676, + "step": 13136 + }, + { + "epoch": 0.5420896261450854, + "grad_norm": 3.5383294013252855, + "learning_rate": 1.3686561622220301e-06, + "loss": 0.5617, + "step": 13137 + }, + { + "epoch": 0.5421308904844433, + "grad_norm": 2.0883269234304773, + "learning_rate": 1.3684564574141991e-06, + "loss": 0.5253, + "step": 13138 + }, + { + "epoch": 0.5421721548238013, + "grad_norm": 2.552941020557438, + "learning_rate": 1.3682567549560655e-06, + "loss": 0.5507, + "step": 13139 + }, + { + "epoch": 0.5422134191631592, + "grad_norm": 7.970577042836629, + "learning_rate": 1.3680570548511954e-06, + "loss": 0.5075, + "step": 13140 + }, + { + "epoch": 0.5422546835025172, + "grad_norm": 7.759163275357981, + "learning_rate": 1.3678573571031566e-06, + "loss": 0.4952, + "step": 13141 + }, + { + "epoch": 0.5422959478418751, + "grad_norm": 5.780238566606006, + "learning_rate": 1.3676576617155157e-06, + "loss": 0.5257, + "step": 13142 + }, + { + "epoch": 0.542337212181233, + "grad_norm": 2.3908924826372893, + "learning_rate": 1.3674579686918404e-06, + "loss": 0.5149, + "step": 13143 + }, + { + "epoch": 0.5423784765205909, + "grad_norm": 6.498158816752186, + "learning_rate": 1.367258278035697e-06, + "loss": 0.5733, + "step": 13144 + }, + { + "epoch": 0.5424197408599488, + "grad_norm": 4.913751656007764, + "learning_rate": 1.3670585897506538e-06, + "loss": 0.5209, + "step": 13145 + }, + { + "epoch": 0.5424610051993067, + "grad_norm": 
5.166528663327777, + "learning_rate": 1.3668589038402758e-06, + "loss": 0.4852, + "step": 13146 + }, + { + "epoch": 0.5425022695386646, + "grad_norm": 3.330325556888876, + "learning_rate": 1.366659220308131e-06, + "loss": 0.5166, + "step": 13147 + }, + { + "epoch": 0.5425435338780226, + "grad_norm": 2.7389969011089415, + "learning_rate": 1.366459539157786e-06, + "loss": 0.5097, + "step": 13148 + }, + { + "epoch": 0.5425847982173806, + "grad_norm": 2.2353663122799516, + "learning_rate": 1.366259860392808e-06, + "loss": 0.5177, + "step": 13149 + }, + { + "epoch": 0.5426260625567385, + "grad_norm": 2.7221206570963012, + "learning_rate": 1.3660601840167631e-06, + "loss": 0.5736, + "step": 13150 + }, + { + "epoch": 0.5426673268960964, + "grad_norm": 2.7594555982004882, + "learning_rate": 1.3658605100332185e-06, + "loss": 0.5018, + "step": 13151 + }, + { + "epoch": 0.5427085912354543, + "grad_norm": 2.6361922299654084, + "learning_rate": 1.3656608384457406e-06, + "loss": 0.5152, + "step": 13152 + }, + { + "epoch": 0.5427498555748123, + "grad_norm": 2.0152839472320574, + "learning_rate": 1.3654611692578965e-06, + "loss": 0.5288, + "step": 13153 + }, + { + "epoch": 0.5427911199141702, + "grad_norm": 2.3425525046380544, + "learning_rate": 1.3652615024732521e-06, + "loss": 0.5218, + "step": 13154 + }, + { + "epoch": 0.5428323842535281, + "grad_norm": 2.729251315363822, + "learning_rate": 1.365061838095375e-06, + "loss": 0.5523, + "step": 13155 + }, + { + "epoch": 0.542873648592886, + "grad_norm": 34.0021878093218, + "learning_rate": 1.3648621761278303e-06, + "loss": 0.5613, + "step": 13156 + }, + { + "epoch": 0.5429149129322439, + "grad_norm": 4.740219832184405, + "learning_rate": 1.3646625165741856e-06, + "loss": 0.4968, + "step": 13157 + }, + { + "epoch": 0.5429561772716018, + "grad_norm": 2.396530552297102, + "learning_rate": 1.3644628594380065e-06, + "loss": 0.5282, + "step": 13158 + }, + { + "epoch": 0.5429974416109599, + "grad_norm": 5.472993064544248, + "learning_rate": 1.3642632047228608e-06, + "loss": 0.6127, + "step": 13159 + }, + { + "epoch": 0.5430387059503178, + "grad_norm": 2.611888803657525, + "learning_rate": 1.3640635524323132e-06, + "loss": 0.4818, + "step": 13160 + }, + { + "epoch": 0.5430799702896757, + "grad_norm": 2.658172464552512, + "learning_rate": 1.3638639025699303e-06, + "loss": 0.5024, + "step": 13161 + }, + { + "epoch": 0.5431212346290336, + "grad_norm": 3.0540816109990496, + "learning_rate": 1.3636642551392793e-06, + "loss": 0.5047, + "step": 13162 + }, + { + "epoch": 0.5431624989683915, + "grad_norm": 11.5043723181774, + "learning_rate": 1.3634646101439252e-06, + "loss": 0.4796, + "step": 13163 + }, + { + "epoch": 0.5432037633077494, + "grad_norm": 3.133785289423368, + "learning_rate": 1.363264967587435e-06, + "loss": 0.5072, + "step": 13164 + }, + { + "epoch": 0.5432450276471074, + "grad_norm": 4.300225699729567, + "learning_rate": 1.3630653274733741e-06, + "loss": 0.5447, + "step": 13165 + }, + { + "epoch": 0.5432862919864653, + "grad_norm": 3.172413579702975, + "learning_rate": 1.3628656898053096e-06, + "loss": 0.5144, + "step": 13166 + }, + { + "epoch": 0.5433275563258232, + "grad_norm": 8.502613237738789, + "learning_rate": 1.3626660545868068e-06, + "loss": 0.5675, + "step": 13167 + }, + { + "epoch": 0.5433688206651811, + "grad_norm": 3.6247086308768717, + "learning_rate": 1.3624664218214315e-06, + "loss": 0.5049, + "step": 13168 + }, + { + "epoch": 0.543410085004539, + "grad_norm": 2.463138990592041, + "learning_rate": 1.3622667915127503e-06, + "loss": 0.5154, + 
"step": 13169 + }, + { + "epoch": 0.543451349343897, + "grad_norm": 3.657477204356682, + "learning_rate": 1.3620671636643286e-06, + "loss": 0.525, + "step": 13170 + }, + { + "epoch": 0.543492613683255, + "grad_norm": 4.198949383101905, + "learning_rate": 1.3618675382797324e-06, + "loss": 0.5699, + "step": 13171 + }, + { + "epoch": 0.5435338780226129, + "grad_norm": 3.377514522174361, + "learning_rate": 1.3616679153625282e-06, + "loss": 0.5514, + "step": 13172 + }, + { + "epoch": 0.5435751423619708, + "grad_norm": 2.7820189868041507, + "learning_rate": 1.3614682949162802e-06, + "loss": 0.5046, + "step": 13173 + }, + { + "epoch": 0.5436164067013287, + "grad_norm": 2.4001197953907663, + "learning_rate": 1.3612686769445553e-06, + "loss": 0.5039, + "step": 13174 + }, + { + "epoch": 0.5436576710406866, + "grad_norm": 2.294163916302904, + "learning_rate": 1.3610690614509187e-06, + "loss": 0.4884, + "step": 13175 + }, + { + "epoch": 0.5436989353800445, + "grad_norm": 2.347094710091621, + "learning_rate": 1.3608694484389366e-06, + "loss": 0.5662, + "step": 13176 + }, + { + "epoch": 0.5437401997194025, + "grad_norm": 3.1076688130816272, + "learning_rate": 1.3606698379121737e-06, + "loss": 0.4878, + "step": 13177 + }, + { + "epoch": 0.5437814640587604, + "grad_norm": 2.826224715470345, + "learning_rate": 1.3604702298741965e-06, + "loss": 0.5143, + "step": 13178 + }, + { + "epoch": 0.5438227283981183, + "grad_norm": 24.193158199833007, + "learning_rate": 1.3602706243285697e-06, + "loss": 0.5382, + "step": 13179 + }, + { + "epoch": 0.5438639927374763, + "grad_norm": 16.183733675688465, + "learning_rate": 1.3600710212788595e-06, + "loss": 0.5324, + "step": 13180 + }, + { + "epoch": 0.5439052570768342, + "grad_norm": 2.8198480348343655, + "learning_rate": 1.3598714207286304e-06, + "loss": 0.4803, + "step": 13181 + }, + { + "epoch": 0.5439465214161922, + "grad_norm": 3.384727953026467, + "learning_rate": 1.3596718226814485e-06, + "loss": 0.5027, + "step": 13182 + }, + { + "epoch": 0.5439877857555501, + "grad_norm": 2.685211434491621, + "learning_rate": 1.359472227140879e-06, + "loss": 0.5312, + "step": 13183 + }, + { + "epoch": 0.544029050094908, + "grad_norm": 2.492988034349116, + "learning_rate": 1.3592726341104869e-06, + "loss": 0.4893, + "step": 13184 + }, + { + "epoch": 0.5440703144342659, + "grad_norm": 2.5216567710568634, + "learning_rate": 1.3590730435938383e-06, + "loss": 0.5016, + "step": 13185 + }, + { + "epoch": 0.5441115787736238, + "grad_norm": 14.06052326820059, + "learning_rate": 1.358873455594497e-06, + "loss": 0.5516, + "step": 13186 + }, + { + "epoch": 0.5441528431129817, + "grad_norm": 4.083115918090202, + "learning_rate": 1.358673870116029e-06, + "loss": 0.5251, + "step": 13187 + }, + { + "epoch": 0.5441941074523396, + "grad_norm": 3.3282249961887933, + "learning_rate": 1.358474287161999e-06, + "loss": 0.5046, + "step": 13188 + }, + { + "epoch": 0.5442353717916976, + "grad_norm": 3.0564103727815763, + "learning_rate": 1.3582747067359724e-06, + "loss": 0.5123, + "step": 13189 + }, + { + "epoch": 0.5442766361310556, + "grad_norm": 7.082780967258358, + "learning_rate": 1.3580751288415143e-06, + "loss": 0.5537, + "step": 13190 + }, + { + "epoch": 0.5443179004704135, + "grad_norm": 4.776600439803598, + "learning_rate": 1.3578755534821889e-06, + "loss": 0.5278, + "step": 13191 + }, + { + "epoch": 0.5443591648097714, + "grad_norm": 14.641885519084472, + "learning_rate": 1.3576759806615622e-06, + "loss": 0.5011, + "step": 13192 + }, + { + "epoch": 0.5444004291491293, + "grad_norm": 
2.591411780641265, + "learning_rate": 1.3574764103831982e-06, + "loss": 0.5066, + "step": 13193 + }, + { + "epoch": 0.5444416934884873, + "grad_norm": 2.0893678098438393, + "learning_rate": 1.357276842650662e-06, + "loss": 0.4942, + "step": 13194 + }, + { + "epoch": 0.5444829578278452, + "grad_norm": 2.699100460213354, + "learning_rate": 1.3570772774675185e-06, + "loss": 0.4875, + "step": 13195 + }, + { + "epoch": 0.5445242221672031, + "grad_norm": 5.31741385666775, + "learning_rate": 1.3568777148373327e-06, + "loss": 0.5319, + "step": 13196 + }, + { + "epoch": 0.544565486506561, + "grad_norm": 4.30614649769249, + "learning_rate": 1.3566781547636684e-06, + "loss": 0.5429, + "step": 13197 + }, + { + "epoch": 0.5446067508459189, + "grad_norm": 4.27320243230312, + "learning_rate": 1.3564785972500916e-06, + "loss": 0.5123, + "step": 13198 + }, + { + "epoch": 0.5446480151852768, + "grad_norm": 12.288873525860156, + "learning_rate": 1.3562790423001659e-06, + "loss": 0.569, + "step": 13199 + }, + { + "epoch": 0.5446892795246349, + "grad_norm": 16.481900710999273, + "learning_rate": 1.3560794899174553e-06, + "loss": 0.5657, + "step": 13200 + }, + { + "epoch": 0.5447305438639928, + "grad_norm": 1.97608749318466, + "learning_rate": 1.3558799401055258e-06, + "loss": 0.5401, + "step": 13201 + }, + { + "epoch": 0.5447718082033507, + "grad_norm": 3.0650619927183693, + "learning_rate": 1.3556803928679405e-06, + "loss": 0.537, + "step": 13202 + }, + { + "epoch": 0.5448130725427086, + "grad_norm": 2.1910733484925067, + "learning_rate": 1.3554808482082648e-06, + "loss": 0.5064, + "step": 13203 + }, + { + "epoch": 0.5448543368820665, + "grad_norm": 3.712496551595828, + "learning_rate": 1.3552813061300625e-06, + "loss": 0.5401, + "step": 13204 + }, + { + "epoch": 0.5448956012214244, + "grad_norm": 2.741309375811754, + "learning_rate": 1.3550817666368983e-06, + "loss": 0.5235, + "step": 13205 + }, + { + "epoch": 0.5449368655607824, + "grad_norm": 3.721831830482528, + "learning_rate": 1.3548822297323364e-06, + "loss": 0.5073, + "step": 13206 + }, + { + "epoch": 0.5449781299001403, + "grad_norm": 3.2500438095445325, + "learning_rate": 1.3546826954199402e-06, + "loss": 0.5107, + "step": 13207 + }, + { + "epoch": 0.5450193942394982, + "grad_norm": 2.572849485164105, + "learning_rate": 1.3544831637032753e-06, + "loss": 0.4952, + "step": 13208 + }, + { + "epoch": 0.5450606585788561, + "grad_norm": 2.3003220415150496, + "learning_rate": 1.3542836345859048e-06, + "loss": 0.5165, + "step": 13209 + }, + { + "epoch": 0.5451019229182141, + "grad_norm": 2.515973923145722, + "learning_rate": 1.3540841080713933e-06, + "loss": 0.5101, + "step": 13210 + }, + { + "epoch": 0.545143187257572, + "grad_norm": 2.1246512627132743, + "learning_rate": 1.353884584163305e-06, + "loss": 0.4714, + "step": 13211 + }, + { + "epoch": 0.54518445159693, + "grad_norm": 3.194041832530306, + "learning_rate": 1.3536850628652027e-06, + "loss": 0.51, + "step": 13212 + }, + { + "epoch": 0.5452257159362879, + "grad_norm": 2.3757652185643447, + "learning_rate": 1.3534855441806517e-06, + "loss": 0.5659, + "step": 13213 + }, + { + "epoch": 0.5452669802756458, + "grad_norm": 3.886482532082772, + "learning_rate": 1.3532860281132154e-06, + "loss": 0.483, + "step": 13214 + }, + { + "epoch": 0.5453082446150037, + "grad_norm": 3.4448198830745884, + "learning_rate": 1.3530865146664576e-06, + "loss": 0.5713, + "step": 13215 + }, + { + "epoch": 0.5453495089543616, + "grad_norm": 4.647742585651514, + "learning_rate": 1.3528870038439419e-06, + "loss": 0.545, + 
"step": 13216 + }, + { + "epoch": 0.5453907732937195, + "grad_norm": 3.4179325290312224, + "learning_rate": 1.3526874956492326e-06, + "loss": 0.4908, + "step": 13217 + }, + { + "epoch": 0.5454320376330775, + "grad_norm": 2.5061429896705474, + "learning_rate": 1.352487990085893e-06, + "loss": 0.5583, + "step": 13218 + }, + { + "epoch": 0.5454733019724354, + "grad_norm": 2.2596476357162247, + "learning_rate": 1.3522884871574872e-06, + "loss": 0.5638, + "step": 13219 + }, + { + "epoch": 0.5455145663117934, + "grad_norm": 2.434692332387429, + "learning_rate": 1.3520889868675782e-06, + "loss": 0.5092, + "step": 13220 + }, + { + "epoch": 0.5455558306511513, + "grad_norm": 14.794923227600565, + "learning_rate": 1.35188948921973e-06, + "loss": 0.5475, + "step": 13221 + }, + { + "epoch": 0.5455970949905092, + "grad_norm": 2.8381338553596267, + "learning_rate": 1.351689994217506e-06, + "loss": 0.54, + "step": 13222 + }, + { + "epoch": 0.5456383593298672, + "grad_norm": 3.23965545201046, + "learning_rate": 1.35149050186447e-06, + "loss": 0.5328, + "step": 13223 + }, + { + "epoch": 0.5456796236692251, + "grad_norm": 2.714505004825788, + "learning_rate": 1.3512910121641856e-06, + "loss": 0.4792, + "step": 13224 + }, + { + "epoch": 0.545720888008583, + "grad_norm": 3.1322152418389546, + "learning_rate": 1.351091525120215e-06, + "loss": 0.5251, + "step": 13225 + }, + { + "epoch": 0.5457621523479409, + "grad_norm": 2.2916731892051034, + "learning_rate": 1.3508920407361222e-06, + "loss": 0.5231, + "step": 13226 + }, + { + "epoch": 0.5458034166872988, + "grad_norm": 2.352648495223073, + "learning_rate": 1.350692559015471e-06, + "loss": 0.5102, + "step": 13227 + }, + { + "epoch": 0.5458446810266567, + "grad_norm": 1.9816502578525237, + "learning_rate": 1.3504930799618237e-06, + "loss": 0.5028, + "step": 13228 + }, + { + "epoch": 0.5458859453660146, + "grad_norm": 2.199249796090485, + "learning_rate": 1.3502936035787444e-06, + "loss": 0.5444, + "step": 13229 + }, + { + "epoch": 0.5459272097053726, + "grad_norm": 2.448346968647904, + "learning_rate": 1.3500941298697955e-06, + "loss": 0.5178, + "step": 13230 + }, + { + "epoch": 0.5459684740447306, + "grad_norm": 1.8739536380644632, + "learning_rate": 1.349894658838541e-06, + "loss": 0.4639, + "step": 13231 + }, + { + "epoch": 0.5460097383840885, + "grad_norm": 2.1593402519984175, + "learning_rate": 1.3496951904885427e-06, + "loss": 0.5474, + "step": 13232 + }, + { + "epoch": 0.5460510027234464, + "grad_norm": 2.8390951115751135, + "learning_rate": 1.3494957248233648e-06, + "loss": 0.5315, + "step": 13233 + }, + { + "epoch": 0.5460922670628043, + "grad_norm": 5.747070568803361, + "learning_rate": 1.3492962618465694e-06, + "loss": 0.4986, + "step": 13234 + }, + { + "epoch": 0.5461335314021623, + "grad_norm": 2.4132174647816376, + "learning_rate": 1.3490968015617201e-06, + "loss": 0.4939, + "step": 13235 + }, + { + "epoch": 0.5461747957415202, + "grad_norm": 2.124864031218275, + "learning_rate": 1.3488973439723791e-06, + "loss": 0.4894, + "step": 13236 + }, + { + "epoch": 0.5462160600808781, + "grad_norm": 3.7637172557135226, + "learning_rate": 1.3486978890821103e-06, + "loss": 0.5098, + "step": 13237 + }, + { + "epoch": 0.546257324420236, + "grad_norm": 2.5945100632982627, + "learning_rate": 1.3484984368944752e-06, + "loss": 0.5239, + "step": 13238 + }, + { + "epoch": 0.5462985887595939, + "grad_norm": 6.611983590176052, + "learning_rate": 1.3482989874130369e-06, + "loss": 0.5114, + "step": 13239 + }, + { + "epoch": 0.5463398530989518, + "grad_norm": 
6.583805734812916, + "learning_rate": 1.3480995406413584e-06, + "loss": 0.5323, + "step": 13240 + }, + { + "epoch": 0.5463811174383099, + "grad_norm": 3.71622150706713, + "learning_rate": 1.3479000965830015e-06, + "loss": 0.5285, + "step": 13241 + }, + { + "epoch": 0.5464223817776678, + "grad_norm": 2.3230177529327034, + "learning_rate": 1.34770065524153e-06, + "loss": 0.576, + "step": 13242 + }, + { + "epoch": 0.5464636461170257, + "grad_norm": 5.076298898881539, + "learning_rate": 1.3475012166205056e-06, + "loss": 0.5095, + "step": 13243 + }, + { + "epoch": 0.5465049104563836, + "grad_norm": 2.389747130631952, + "learning_rate": 1.3473017807234908e-06, + "loss": 0.5212, + "step": 13244 + }, + { + "epoch": 0.5465461747957415, + "grad_norm": 3.873740461915294, + "learning_rate": 1.3471023475540486e-06, + "loss": 0.5014, + "step": 13245 + }, + { + "epoch": 0.5465874391350994, + "grad_norm": 3.233822694467096, + "learning_rate": 1.3469029171157406e-06, + "loss": 0.5573, + "step": 13246 + }, + { + "epoch": 0.5466287034744574, + "grad_norm": 3.2797652505016273, + "learning_rate": 1.3467034894121296e-06, + "loss": 0.559, + "step": 13247 + }, + { + "epoch": 0.5466699678138153, + "grad_norm": 2.534433008798705, + "learning_rate": 1.3465040644467778e-06, + "loss": 0.4885, + "step": 13248 + }, + { + "epoch": 0.5467112321531732, + "grad_norm": 7.04835374097285, + "learning_rate": 1.3463046422232474e-06, + "loss": 0.545, + "step": 13249 + }, + { + "epoch": 0.5467524964925311, + "grad_norm": 2.4229006075375827, + "learning_rate": 1.3461052227451004e-06, + "loss": 0.5214, + "step": 13250 + }, + { + "epoch": 0.5467937608318891, + "grad_norm": 9.118806856096281, + "learning_rate": 1.3459058060158996e-06, + "loss": 0.5311, + "step": 13251 + }, + { + "epoch": 0.546835025171247, + "grad_norm": 3.120574641902817, + "learning_rate": 1.3457063920392064e-06, + "loss": 0.4772, + "step": 13252 + }, + { + "epoch": 0.546876289510605, + "grad_norm": 6.809144681195477, + "learning_rate": 1.3455069808185828e-06, + "loss": 0.5284, + "step": 13253 + }, + { + "epoch": 0.5469175538499629, + "grad_norm": 2.8868286062339585, + "learning_rate": 1.3453075723575913e-06, + "loss": 0.5182, + "step": 13254 + }, + { + "epoch": 0.5469588181893208, + "grad_norm": 3.7023936972370732, + "learning_rate": 1.345108166659793e-06, + "loss": 0.506, + "step": 13255 + }, + { + "epoch": 0.5470000825286787, + "grad_norm": 38.63685519737205, + "learning_rate": 1.3449087637287509e-06, + "loss": 0.5359, + "step": 13256 + }, + { + "epoch": 0.5470413468680366, + "grad_norm": 2.732335349542679, + "learning_rate": 1.3447093635680258e-06, + "loss": 0.5304, + "step": 13257 + }, + { + "epoch": 0.5470826112073945, + "grad_norm": 2.240722055794725, + "learning_rate": 1.34450996618118e-06, + "loss": 0.5179, + "step": 13258 + }, + { + "epoch": 0.5471238755467525, + "grad_norm": 4.4223630564787655, + "learning_rate": 1.3443105715717753e-06, + "loss": 0.5631, + "step": 13259 + }, + { + "epoch": 0.5471651398861104, + "grad_norm": 2.9778909763656465, + "learning_rate": 1.3441111797433735e-06, + "loss": 0.5415, + "step": 13260 + }, + { + "epoch": 0.5472064042254684, + "grad_norm": 2.87879238949603, + "learning_rate": 1.3439117906995357e-06, + "loss": 0.4876, + "step": 13261 + }, + { + "epoch": 0.5472476685648263, + "grad_norm": 5.380300951158178, + "learning_rate": 1.343712404443824e-06, + "loss": 0.5412, + "step": 13262 + }, + { + "epoch": 0.5472889329041842, + "grad_norm": 3.4463649265202245, + "learning_rate": 1.3435130209797994e-06, + "loss": 0.4778, + 
"step": 13263 + }, + { + "epoch": 0.5473301972435421, + "grad_norm": 2.2737763092877095, + "learning_rate": 1.3433136403110246e-06, + "loss": 0.503, + "step": 13264 + }, + { + "epoch": 0.5473714615829001, + "grad_norm": 2.2181107812445395, + "learning_rate": 1.3431142624410593e-06, + "loss": 0.5855, + "step": 13265 + }, + { + "epoch": 0.547412725922258, + "grad_norm": 3.817676213393922, + "learning_rate": 1.342914887373466e-06, + "loss": 0.528, + "step": 13266 + }, + { + "epoch": 0.5474539902616159, + "grad_norm": 2.5931213350495255, + "learning_rate": 1.3427155151118055e-06, + "loss": 0.448, + "step": 13267 + }, + { + "epoch": 0.5474952546009738, + "grad_norm": 5.560569270719307, + "learning_rate": 1.34251614565964e-06, + "loss": 0.5494, + "step": 13268 + }, + { + "epoch": 0.5475365189403317, + "grad_norm": 3.74364542325696, + "learning_rate": 1.3423167790205291e-06, + "loss": 0.5191, + "step": 13269 + }, + { + "epoch": 0.5475777832796896, + "grad_norm": 6.399589872068076, + "learning_rate": 1.3421174151980357e-06, + "loss": 0.5937, + "step": 13270 + }, + { + "epoch": 0.5476190476190477, + "grad_norm": 2.610751531050989, + "learning_rate": 1.34191805419572e-06, + "loss": 0.4712, + "step": 13271 + }, + { + "epoch": 0.5476603119584056, + "grad_norm": 4.024308372349405, + "learning_rate": 1.3417186960171436e-06, + "loss": 0.5713, + "step": 13272 + }, + { + "epoch": 0.5477015762977635, + "grad_norm": 13.752206022811334, + "learning_rate": 1.3415193406658668e-06, + "loss": 0.5254, + "step": 13273 + }, + { + "epoch": 0.5477428406371214, + "grad_norm": 9.397072518652058, + "learning_rate": 1.3413199881454515e-06, + "loss": 0.5323, + "step": 13274 + }, + { + "epoch": 0.5477841049764793, + "grad_norm": 1.9574175852743965, + "learning_rate": 1.3411206384594576e-06, + "loss": 0.5198, + "step": 13275 + }, + { + "epoch": 0.5478253693158373, + "grad_norm": 2.956480212459103, + "learning_rate": 1.3409212916114472e-06, + "loss": 0.5207, + "step": 13276 + }, + { + "epoch": 0.5478666336551952, + "grad_norm": 2.2280364715152987, + "learning_rate": 1.3407219476049806e-06, + "loss": 0.4774, + "step": 13277 + }, + { + "epoch": 0.5479078979945531, + "grad_norm": 2.7948881707824316, + "learning_rate": 1.3405226064436178e-06, + "loss": 0.4983, + "step": 13278 + }, + { + "epoch": 0.547949162333911, + "grad_norm": 2.9380174849260317, + "learning_rate": 1.3403232681309206e-06, + "loss": 0.5568, + "step": 13279 + }, + { + "epoch": 0.5479904266732689, + "grad_norm": 2.9209323097904374, + "learning_rate": 1.3401239326704491e-06, + "loss": 0.4937, + "step": 13280 + }, + { + "epoch": 0.5480316910126269, + "grad_norm": 3.556078571011547, + "learning_rate": 1.339924600065764e-06, + "loss": 0.5103, + "step": 13281 + }, + { + "epoch": 0.5480729553519849, + "grad_norm": 2.9093457194084924, + "learning_rate": 1.339725270320426e-06, + "loss": 0.5047, + "step": 13282 + }, + { + "epoch": 0.5481142196913428, + "grad_norm": 3.3085422194515264, + "learning_rate": 1.339525943437996e-06, + "loss": 0.5994, + "step": 13283 + }, + { + "epoch": 0.5481554840307007, + "grad_norm": 2.1126679692990646, + "learning_rate": 1.339326619422034e-06, + "loss": 0.4825, + "step": 13284 + }, + { + "epoch": 0.5481967483700586, + "grad_norm": 6.409576426430756, + "learning_rate": 1.3391272982761003e-06, + "loss": 0.549, + "step": 13285 + }, + { + "epoch": 0.5482380127094165, + "grad_norm": 3.156364472720474, + "learning_rate": 1.3389279800037556e-06, + "loss": 0.4713, + "step": 13286 + }, + { + "epoch": 0.5482792770487744, + "grad_norm": 
2.7012688162950194, + "learning_rate": 1.33872866460856e-06, + "loss": 0.5299, + "step": 13287 + }, + { + "epoch": 0.5483205413881324, + "grad_norm": 1.831238554703343, + "learning_rate": 1.338529352094074e-06, + "loss": 0.5099, + "step": 13288 + }, + { + "epoch": 0.5483618057274903, + "grad_norm": 2.3179898097984175, + "learning_rate": 1.3383300424638576e-06, + "loss": 0.4746, + "step": 13289 + }, + { + "epoch": 0.5484030700668482, + "grad_norm": 3.161070123521792, + "learning_rate": 1.3381307357214715e-06, + "loss": 0.5515, + "step": 13290 + }, + { + "epoch": 0.5484443344062061, + "grad_norm": 2.3765306766822705, + "learning_rate": 1.3379314318704753e-06, + "loss": 0.4894, + "step": 13291 + }, + { + "epoch": 0.5484855987455641, + "grad_norm": 3.490116568807198, + "learning_rate": 1.3377321309144286e-06, + "loss": 0.5594, + "step": 13292 + }, + { + "epoch": 0.548526863084922, + "grad_norm": 3.2799865311709047, + "learning_rate": 1.3375328328568926e-06, + "loss": 0.5302, + "step": 13293 + }, + { + "epoch": 0.54856812742428, + "grad_norm": 2.3548911749181394, + "learning_rate": 1.337333537701426e-06, + "loss": 0.5144, + "step": 13294 + }, + { + "epoch": 0.5486093917636379, + "grad_norm": 4.030500170352523, + "learning_rate": 1.3371342454515898e-06, + "loss": 0.4962, + "step": 13295 + }, + { + "epoch": 0.5486506561029958, + "grad_norm": 3.0053809535828613, + "learning_rate": 1.336934956110943e-06, + "loss": 0.5377, + "step": 13296 + }, + { + "epoch": 0.5486919204423537, + "grad_norm": 2.2814457120169216, + "learning_rate": 1.3367356696830463e-06, + "loss": 0.486, + "step": 13297 + }, + { + "epoch": 0.5487331847817116, + "grad_norm": 4.456581520640316, + "learning_rate": 1.3365363861714586e-06, + "loss": 0.4848, + "step": 13298 + }, + { + "epoch": 0.5487744491210695, + "grad_norm": 3.11890638943778, + "learning_rate": 1.33633710557974e-06, + "loss": 0.5009, + "step": 13299 + }, + { + "epoch": 0.5488157134604275, + "grad_norm": 2.7861856372106173, + "learning_rate": 1.3361378279114501e-06, + "loss": 0.4643, + "step": 13300 + }, + { + "epoch": 0.5488569777997854, + "grad_norm": 9.978242447481763, + "learning_rate": 1.335938553170149e-06, + "loss": 0.4794, + "step": 13301 + }, + { + "epoch": 0.5488982421391434, + "grad_norm": 2.834745633719472, + "learning_rate": 1.335739281359395e-06, + "loss": 0.5917, + "step": 13302 + }, + { + "epoch": 0.5489395064785013, + "grad_norm": 3.2750333618492378, + "learning_rate": 1.3355400124827497e-06, + "loss": 0.5701, + "step": 13303 + }, + { + "epoch": 0.5489807708178592, + "grad_norm": 2.570034641228822, + "learning_rate": 1.3353407465437701e-06, + "loss": 0.466, + "step": 13304 + }, + { + "epoch": 0.5490220351572171, + "grad_norm": 3.5180907056919626, + "learning_rate": 1.3351414835460169e-06, + "loss": 0.5873, + "step": 13305 + }, + { + "epoch": 0.5490632994965751, + "grad_norm": 1.6618813621927893, + "learning_rate": 1.334942223493049e-06, + "loss": 0.5326, + "step": 13306 + }, + { + "epoch": 0.549104563835933, + "grad_norm": 3.155530467692797, + "learning_rate": 1.3347429663884261e-06, + "loss": 0.5294, + "step": 13307 + }, + { + "epoch": 0.5491458281752909, + "grad_norm": 8.824917731945797, + "learning_rate": 1.334543712235707e-06, + "loss": 0.5149, + "step": 13308 + }, + { + "epoch": 0.5491870925146488, + "grad_norm": 3.081303268492744, + "learning_rate": 1.3343444610384516e-06, + "loss": 0.5343, + "step": 13309 + }, + { + "epoch": 0.5492283568540067, + "grad_norm": 3.6260120370239073, + "learning_rate": 1.3341452128002178e-06, + "loss": 0.454, + 
"step": 13310 + }, + { + "epoch": 0.5492696211933646, + "grad_norm": 1.9866162954294346, + "learning_rate": 1.333945967524566e-06, + "loss": 0.5273, + "step": 13311 + }, + { + "epoch": 0.5493108855327227, + "grad_norm": 2.997242090058199, + "learning_rate": 1.3337467252150543e-06, + "loss": 0.5123, + "step": 13312 + }, + { + "epoch": 0.5493521498720806, + "grad_norm": 5.071158697679063, + "learning_rate": 1.3335474858752423e-06, + "loss": 0.5606, + "step": 13313 + }, + { + "epoch": 0.5493934142114385, + "grad_norm": 2.4316926191306543, + "learning_rate": 1.3333482495086882e-06, + "loss": 0.5823, + "step": 13314 + }, + { + "epoch": 0.5494346785507964, + "grad_norm": 2.9506807641100092, + "learning_rate": 1.3331490161189517e-06, + "loss": 0.5361, + "step": 13315 + }, + { + "epoch": 0.5494759428901543, + "grad_norm": 92.78740568872058, + "learning_rate": 1.3329497857095913e-06, + "loss": 0.5293, + "step": 13316 + }, + { + "epoch": 0.5495172072295123, + "grad_norm": 3.1737803811912397, + "learning_rate": 1.3327505582841651e-06, + "loss": 0.5266, + "step": 13317 + }, + { + "epoch": 0.5495584715688702, + "grad_norm": 2.595850171187798, + "learning_rate": 1.3325513338462328e-06, + "loss": 0.5087, + "step": 13318 + }, + { + "epoch": 0.5495997359082281, + "grad_norm": 10.395982435035723, + "learning_rate": 1.332352112399352e-06, + "loss": 0.5309, + "step": 13319 + }, + { + "epoch": 0.549641000247586, + "grad_norm": 5.791986728038225, + "learning_rate": 1.3321528939470824e-06, + "loss": 0.5417, + "step": 13320 + }, + { + "epoch": 0.5496822645869439, + "grad_norm": 3.147941213379061, + "learning_rate": 1.3319536784929814e-06, + "loss": 0.5136, + "step": 13321 + }, + { + "epoch": 0.5497235289263019, + "grad_norm": 2.5615947919023263, + "learning_rate": 1.3317544660406086e-06, + "loss": 0.5479, + "step": 13322 + }, + { + "epoch": 0.5497647932656599, + "grad_norm": 2.5277682217520487, + "learning_rate": 1.331555256593522e-06, + "loss": 0.5535, + "step": 13323 + }, + { + "epoch": 0.5498060576050178, + "grad_norm": 2.484681387021173, + "learning_rate": 1.3313560501552795e-06, + "loss": 0.4734, + "step": 13324 + }, + { + "epoch": 0.5498473219443757, + "grad_norm": 3.647696229941251, + "learning_rate": 1.33115684672944e-06, + "loss": 0.4804, + "step": 13325 + }, + { + "epoch": 0.5498885862837336, + "grad_norm": 2.9928423093363135, + "learning_rate": 1.3309576463195615e-06, + "loss": 0.499, + "step": 13326 + }, + { + "epoch": 0.5499298506230915, + "grad_norm": 4.081564605318463, + "learning_rate": 1.3307584489292024e-06, + "loss": 0.4904, + "step": 13327 + }, + { + "epoch": 0.5499711149624494, + "grad_norm": 3.3488367706018147, + "learning_rate": 1.3305592545619206e-06, + "loss": 0.5111, + "step": 13328 + }, + { + "epoch": 0.5500123793018074, + "grad_norm": 3.887653589583405, + "learning_rate": 1.3303600632212752e-06, + "loss": 0.5395, + "step": 13329 + }, + { + "epoch": 0.5500536436411653, + "grad_norm": 4.301985789222389, + "learning_rate": 1.330160874910823e-06, + "loss": 0.4415, + "step": 13330 + }, + { + "epoch": 0.5500949079805232, + "grad_norm": 3.3666209454382776, + "learning_rate": 1.329961689634122e-06, + "loss": 0.5517, + "step": 13331 + }, + { + "epoch": 0.5501361723198812, + "grad_norm": 2.610635371366213, + "learning_rate": 1.3297625073947311e-06, + "loss": 0.508, + "step": 13332 + }, + { + "epoch": 0.5501774366592391, + "grad_norm": 8.197575138222113, + "learning_rate": 1.3295633281962072e-06, + "loss": 0.5116, + "step": 13333 + }, + { + "epoch": 0.550218700998597, + "grad_norm": 
11.297836528540635, + "learning_rate": 1.3293641520421089e-06, + "loss": 0.5072, + "step": 13334 + }, + { + "epoch": 0.550259965337955, + "grad_norm": 3.2787247173365413, + "learning_rate": 1.3291649789359936e-06, + "loss": 0.5614, + "step": 13335 + }, + { + "epoch": 0.5503012296773129, + "grad_norm": 4.365399231768667, + "learning_rate": 1.3289658088814193e-06, + "loss": 0.5821, + "step": 13336 + }, + { + "epoch": 0.5503424940166708, + "grad_norm": 2.1958420693799883, + "learning_rate": 1.3287666418819433e-06, + "loss": 0.5179, + "step": 13337 + }, + { + "epoch": 0.5503837583560287, + "grad_norm": 2.553528652228177, + "learning_rate": 1.3285674779411236e-06, + "loss": 0.4687, + "step": 13338 + }, + { + "epoch": 0.5504250226953866, + "grad_norm": 2.513222472508235, + "learning_rate": 1.3283683170625174e-06, + "loss": 0.5258, + "step": 13339 + }, + { + "epoch": 0.5504662870347445, + "grad_norm": 2.6292874063537566, + "learning_rate": 1.3281691592496827e-06, + "loss": 0.4896, + "step": 13340 + }, + { + "epoch": 0.5505075513741025, + "grad_norm": 5.391807363477231, + "learning_rate": 1.3279700045061764e-06, + "loss": 0.5164, + "step": 13341 + }, + { + "epoch": 0.5505488157134605, + "grad_norm": 3.4962720327623127, + "learning_rate": 1.3277708528355564e-06, + "loss": 0.5175, + "step": 13342 + }, + { + "epoch": 0.5505900800528184, + "grad_norm": 8.271728356076613, + "learning_rate": 1.3275717042413803e-06, + "loss": 0.5486, + "step": 13343 + }, + { + "epoch": 0.5506313443921763, + "grad_norm": 4.311259705743844, + "learning_rate": 1.3273725587272044e-06, + "loss": 0.5351, + "step": 13344 + }, + { + "epoch": 0.5506726087315342, + "grad_norm": 2.0778138049858472, + "learning_rate": 1.3271734162965864e-06, + "loss": 0.5772, + "step": 13345 + }, + { + "epoch": 0.5507138730708921, + "grad_norm": 1.9885386398472649, + "learning_rate": 1.3269742769530836e-06, + "loss": 0.4724, + "step": 13346 + }, + { + "epoch": 0.5507551374102501, + "grad_norm": 3.0743834547786, + "learning_rate": 1.326775140700253e-06, + "loss": 0.5098, + "step": 13347 + }, + { + "epoch": 0.550796401749608, + "grad_norm": 5.066318872276549, + "learning_rate": 1.3265760075416518e-06, + "loss": 0.4732, + "step": 13348 + }, + { + "epoch": 0.5508376660889659, + "grad_norm": 4.254472250509109, + "learning_rate": 1.3263768774808367e-06, + "loss": 0.5133, + "step": 13349 + }, + { + "epoch": 0.5508789304283238, + "grad_norm": 6.1905455283126125, + "learning_rate": 1.3261777505213652e-06, + "loss": 0.5436, + "step": 13350 + }, + { + "epoch": 0.5509201947676817, + "grad_norm": 3.6891850101331043, + "learning_rate": 1.3259786266667935e-06, + "loss": 0.4833, + "step": 13351 + }, + { + "epoch": 0.5509614591070396, + "grad_norm": 1.9505554279014974, + "learning_rate": 1.3257795059206792e-06, + "loss": 0.513, + "step": 13352 + }, + { + "epoch": 0.5510027234463977, + "grad_norm": 3.568492497570319, + "learning_rate": 1.3255803882865786e-06, + "loss": 0.4939, + "step": 13353 + }, + { + "epoch": 0.5510439877857556, + "grad_norm": 6.132235538856788, + "learning_rate": 1.3253812737680488e-06, + "loss": 0.5058, + "step": 13354 + }, + { + "epoch": 0.5510852521251135, + "grad_norm": 2.2513097551411034, + "learning_rate": 1.3251821623686462e-06, + "loss": 0.552, + "step": 13355 + }, + { + "epoch": 0.5511265164644714, + "grad_norm": 3.384579551297929, + "learning_rate": 1.3249830540919278e-06, + "loss": 0.5358, + "step": 13356 + }, + { + "epoch": 0.5511677808038293, + "grad_norm": 3.6954959454953284, + "learning_rate": 1.3247839489414498e-06, + "loss": 
0.5559, + "step": 13357 + }, + { + "epoch": 0.5512090451431872, + "grad_norm": 8.463153923975032, + "learning_rate": 1.3245848469207683e-06, + "loss": 0.5369, + "step": 13358 + }, + { + "epoch": 0.5512503094825452, + "grad_norm": 7.916991199412779, + "learning_rate": 1.3243857480334408e-06, + "loss": 0.4748, + "step": 13359 + }, + { + "epoch": 0.5512915738219031, + "grad_norm": 3.2796083438972734, + "learning_rate": 1.3241866522830228e-06, + "loss": 0.526, + "step": 13360 + }, + { + "epoch": 0.551332838161261, + "grad_norm": 5.424776640395204, + "learning_rate": 1.3239875596730713e-06, + "loss": 0.5655, + "step": 13361 + }, + { + "epoch": 0.5513741025006189, + "grad_norm": 4.067835162543003, + "learning_rate": 1.323788470207142e-06, + "loss": 0.6034, + "step": 13362 + }, + { + "epoch": 0.5514153668399769, + "grad_norm": 62.645842216927335, + "learning_rate": 1.3235893838887913e-06, + "loss": 0.533, + "step": 13363 + }, + { + "epoch": 0.5514566311793349, + "grad_norm": 3.6688415792094724, + "learning_rate": 1.323390300721576e-06, + "loss": 0.5294, + "step": 13364 + }, + { + "epoch": 0.5514978955186928, + "grad_norm": 4.647491689425961, + "learning_rate": 1.3231912207090513e-06, + "loss": 0.5172, + "step": 13365 + }, + { + "epoch": 0.5515391598580507, + "grad_norm": 3.5225364872334177, + "learning_rate": 1.322992143854774e-06, + "loss": 0.5357, + "step": 13366 + }, + { + "epoch": 0.5515804241974086, + "grad_norm": 2.6739758764297927, + "learning_rate": 1.3227930701622995e-06, + "loss": 0.4895, + "step": 13367 + }, + { + "epoch": 0.5516216885367665, + "grad_norm": 9.842692052862148, + "learning_rate": 1.3225939996351845e-06, + "loss": 0.5148, + "step": 13368 + }, + { + "epoch": 0.5516629528761244, + "grad_norm": 3.055844162857782, + "learning_rate": 1.3223949322769847e-06, + "loss": 0.5553, + "step": 13369 + }, + { + "epoch": 0.5517042172154824, + "grad_norm": 5.784034396901458, + "learning_rate": 1.322195868091255e-06, + "loss": 0.4935, + "step": 13370 + }, + { + "epoch": 0.5517454815548403, + "grad_norm": 6.058466058205233, + "learning_rate": 1.3219968070815523e-06, + "loss": 0.5207, + "step": 13371 + }, + { + "epoch": 0.5517867458941982, + "grad_norm": 2.5439113534124207, + "learning_rate": 1.3217977492514315e-06, + "loss": 0.5197, + "step": 13372 + }, + { + "epoch": 0.5518280102335562, + "grad_norm": 2.468922238546982, + "learning_rate": 1.3215986946044492e-06, + "loss": 0.4521, + "step": 13373 + }, + { + "epoch": 0.5518692745729141, + "grad_norm": 2.7689326024043446, + "learning_rate": 1.32139964314416e-06, + "loss": 0.5767, + "step": 13374 + }, + { + "epoch": 0.551910538912272, + "grad_norm": 5.224414284715617, + "learning_rate": 1.3212005948741204e-06, + "loss": 0.5393, + "step": 13375 + }, + { + "epoch": 0.55195180325163, + "grad_norm": 3.2439731781975443, + "learning_rate": 1.321001549797885e-06, + "loss": 0.4867, + "step": 13376 + }, + { + "epoch": 0.5519930675909879, + "grad_norm": 3.018009967038671, + "learning_rate": 1.3208025079190102e-06, + "loss": 0.4976, + "step": 13377 + }, + { + "epoch": 0.5520343319303458, + "grad_norm": 8.912679643648994, + "learning_rate": 1.3206034692410506e-06, + "loss": 0.5371, + "step": 13378 + }, + { + "epoch": 0.5520755962697037, + "grad_norm": 2.1248626828441064, + "learning_rate": 1.3204044337675622e-06, + "loss": 0.527, + "step": 13379 + }, + { + "epoch": 0.5521168606090616, + "grad_norm": 2.4341774855499243, + "learning_rate": 1.3202054015020992e-06, + "loss": 0.5243, + "step": 13380 + }, + { + "epoch": 0.5521581249484195, + "grad_norm": 
2.8547659881739826, + "learning_rate": 1.3200063724482182e-06, + "loss": 0.5285, + "step": 13381 + }, + { + "epoch": 0.5521993892877775, + "grad_norm": 3.7503040683257978, + "learning_rate": 1.3198073466094737e-06, + "loss": 0.5358, + "step": 13382 + }, + { + "epoch": 0.5522406536271355, + "grad_norm": 3.240407930919623, + "learning_rate": 1.3196083239894207e-06, + "loss": 0.5596, + "step": 13383 + }, + { + "epoch": 0.5522819179664934, + "grad_norm": 2.836093749141411, + "learning_rate": 1.3194093045916139e-06, + "loss": 0.5251, + "step": 13384 + }, + { + "epoch": 0.5523231823058513, + "grad_norm": 2.939867538844663, + "learning_rate": 1.319210288419609e-06, + "loss": 0.5705, + "step": 13385 + }, + { + "epoch": 0.5523644466452092, + "grad_norm": 3.3270004236476423, + "learning_rate": 1.3190112754769601e-06, + "loss": 0.4616, + "step": 13386 + }, + { + "epoch": 0.5524057109845671, + "grad_norm": 2.47947768658192, + "learning_rate": 1.318812265767223e-06, + "loss": 0.5459, + "step": 13387 + }, + { + "epoch": 0.5524469753239251, + "grad_norm": 8.945665991448651, + "learning_rate": 1.318613259293952e-06, + "loss": 0.5316, + "step": 13388 + }, + { + "epoch": 0.552488239663283, + "grad_norm": 2.4345306010854473, + "learning_rate": 1.318414256060702e-06, + "loss": 0.5478, + "step": 13389 + }, + { + "epoch": 0.5525295040026409, + "grad_norm": 2.755064465859372, + "learning_rate": 1.3182152560710275e-06, + "loss": 0.5085, + "step": 13390 + }, + { + "epoch": 0.5525707683419988, + "grad_norm": 2.5060911899943537, + "learning_rate": 1.3180162593284835e-06, + "loss": 0.5083, + "step": 13391 + }, + { + "epoch": 0.5526120326813567, + "grad_norm": 3.280235920231275, + "learning_rate": 1.3178172658366241e-06, + "loss": 0.5275, + "step": 13392 + }, + { + "epoch": 0.5526532970207148, + "grad_norm": 2.4550249139429923, + "learning_rate": 1.3176182755990043e-06, + "loss": 0.4785, + "step": 13393 + }, + { + "epoch": 0.5526945613600727, + "grad_norm": 4.2109129687670315, + "learning_rate": 1.3174192886191783e-06, + "loss": 0.5218, + "step": 13394 + }, + { + "epoch": 0.5527358256994306, + "grad_norm": 2.6121823831037196, + "learning_rate": 1.317220304900701e-06, + "loss": 0.5259, + "step": 13395 + }, + { + "epoch": 0.5527770900387885, + "grad_norm": 2.575202268336711, + "learning_rate": 1.317021324447126e-06, + "loss": 0.4823, + "step": 13396 + }, + { + "epoch": 0.5528183543781464, + "grad_norm": 2.748287076897796, + "learning_rate": 1.3168223472620075e-06, + "loss": 0.5686, + "step": 13397 + }, + { + "epoch": 0.5528596187175043, + "grad_norm": 2.836117325940052, + "learning_rate": 1.3166233733489005e-06, + "loss": 0.5292, + "step": 13398 + }, + { + "epoch": 0.5529008830568622, + "grad_norm": 3.549152583030981, + "learning_rate": 1.3164244027113584e-06, + "loss": 0.5384, + "step": 13399 + }, + { + "epoch": 0.5529421473962202, + "grad_norm": 6.91110025047082, + "learning_rate": 1.3162254353529362e-06, + "loss": 0.5129, + "step": 13400 + }, + { + "epoch": 0.5529834117355781, + "grad_norm": 2.3498251994192563, + "learning_rate": 1.3160264712771873e-06, + "loss": 0.5033, + "step": 13401 + }, + { + "epoch": 0.553024676074936, + "grad_norm": 3.8871107835655776, + "learning_rate": 1.3158275104876657e-06, + "loss": 0.5684, + "step": 13402 + }, + { + "epoch": 0.553065940414294, + "grad_norm": 13.1171923871153, + "learning_rate": 1.3156285529879256e-06, + "loss": 0.5423, + "step": 13403 + }, + { + "epoch": 0.5531072047536519, + "grad_norm": 5.074913454132924, + "learning_rate": 1.3154295987815204e-06, + "loss": 0.5761, + 
"step": 13404 + }, + { + "epoch": 0.5531484690930099, + "grad_norm": 5.7675473665143375, + "learning_rate": 1.3152306478720048e-06, + "loss": 0.4611, + "step": 13405 + }, + { + "epoch": 0.5531897334323678, + "grad_norm": 2.170512181475236, + "learning_rate": 1.3150317002629316e-06, + "loss": 0.5523, + "step": 13406 + }, + { + "epoch": 0.5532309977717257, + "grad_norm": 12.88573876227004, + "learning_rate": 1.3148327559578554e-06, + "loss": 0.52, + "step": 13407 + }, + { + "epoch": 0.5532722621110836, + "grad_norm": 2.0388808407504793, + "learning_rate": 1.3146338149603297e-06, + "loss": 0.5349, + "step": 13408 + }, + { + "epoch": 0.5533135264504415, + "grad_norm": 10.372959785558963, + "learning_rate": 1.314434877273907e-06, + "loss": 0.5746, + "step": 13409 + }, + { + "epoch": 0.5533547907897994, + "grad_norm": 1.9621552364183212, + "learning_rate": 1.3142359429021422e-06, + "loss": 0.5195, + "step": 13410 + }, + { + "epoch": 0.5533960551291573, + "grad_norm": 2.6358788380956515, + "learning_rate": 1.3140370118485876e-06, + "loss": 0.4974, + "step": 13411 + }, + { + "epoch": 0.5534373194685153, + "grad_norm": 3.046921787199806, + "learning_rate": 1.3138380841167975e-06, + "loss": 0.4877, + "step": 13412 + }, + { + "epoch": 0.5534785838078732, + "grad_norm": 2.3905025870053356, + "learning_rate": 1.3136391597103249e-06, + "loss": 0.4925, + "step": 13413 + }, + { + "epoch": 0.5535198481472312, + "grad_norm": 2.628524293461495, + "learning_rate": 1.313440238632723e-06, + "loss": 0.5451, + "step": 13414 + }, + { + "epoch": 0.5535611124865891, + "grad_norm": 2.6534732457408055, + "learning_rate": 1.313241320887545e-06, + "loss": 0.4605, + "step": 13415 + }, + { + "epoch": 0.553602376825947, + "grad_norm": 2.157763593662755, + "learning_rate": 1.3130424064783448e-06, + "loss": 0.5096, + "step": 13416 + }, + { + "epoch": 0.553643641165305, + "grad_norm": 3.2436621747037178, + "learning_rate": 1.3128434954086742e-06, + "loss": 0.4931, + "step": 13417 + }, + { + "epoch": 0.5536849055046629, + "grad_norm": 2.0859013264987705, + "learning_rate": 1.3126445876820875e-06, + "loss": 0.5241, + "step": 13418 + }, + { + "epoch": 0.5537261698440208, + "grad_norm": 3.9702585826726455, + "learning_rate": 1.3124456833021368e-06, + "loss": 0.5348, + "step": 13419 + }, + { + "epoch": 0.5537674341833787, + "grad_norm": 1.6364904219244234, + "learning_rate": 1.3122467822723757e-06, + "loss": 0.5324, + "step": 13420 + }, + { + "epoch": 0.5538086985227366, + "grad_norm": 12.423933916360655, + "learning_rate": 1.312047884596357e-06, + "loss": 0.5064, + "step": 13421 + }, + { + "epoch": 0.5538499628620945, + "grad_norm": 7.220230539820669, + "learning_rate": 1.311848990277633e-06, + "loss": 0.4923, + "step": 13422 + }, + { + "epoch": 0.5538912272014525, + "grad_norm": 3.5017446125338445, + "learning_rate": 1.3116500993197564e-06, + "loss": 0.5086, + "step": 13423 + }, + { + "epoch": 0.5539324915408105, + "grad_norm": 4.309308025154251, + "learning_rate": 1.3114512117262804e-06, + "loss": 0.5458, + "step": 13424 + }, + { + "epoch": 0.5539737558801684, + "grad_norm": 40.37181983909809, + "learning_rate": 1.3112523275007572e-06, + "loss": 0.4811, + "step": 13425 + }, + { + "epoch": 0.5540150202195263, + "grad_norm": 7.89682131362951, + "learning_rate": 1.3110534466467403e-06, + "loss": 0.515, + "step": 13426 + }, + { + "epoch": 0.5540562845588842, + "grad_norm": 1.9495200399741401, + "learning_rate": 1.3108545691677808e-06, + "loss": 0.4688, + "step": 13427 + }, + { + "epoch": 0.5540975488982421, + "grad_norm": 
2.7362298529412827, + "learning_rate": 1.3106556950674322e-06, + "loss": 0.5617, + "step": 13428 + }, + { + "epoch": 0.5541388132376001, + "grad_norm": 2.460341407338487, + "learning_rate": 1.3104568243492464e-06, + "loss": 0.4817, + "step": 13429 + }, + { + "epoch": 0.554180077576958, + "grad_norm": 2.8388559453798483, + "learning_rate": 1.310257957016776e-06, + "loss": 0.4925, + "step": 13430 + }, + { + "epoch": 0.5542213419163159, + "grad_norm": 2.255201924083249, + "learning_rate": 1.3100590930735728e-06, + "loss": 0.5415, + "step": 13431 + }, + { + "epoch": 0.5542626062556738, + "grad_norm": 4.606127870173587, + "learning_rate": 1.3098602325231897e-06, + "loss": 0.5163, + "step": 13432 + }, + { + "epoch": 0.5543038705950317, + "grad_norm": 3.38078157489267, + "learning_rate": 1.3096613753691784e-06, + "loss": 0.4979, + "step": 13433 + }, + { + "epoch": 0.5543451349343897, + "grad_norm": 2.296353874586031, + "learning_rate": 1.3094625216150912e-06, + "loss": 0.4784, + "step": 13434 + }, + { + "epoch": 0.5543863992737477, + "grad_norm": 6.558443577623897, + "learning_rate": 1.3092636712644805e-06, + "loss": 0.566, + "step": 13435 + }, + { + "epoch": 0.5544276636131056, + "grad_norm": 6.90631093321347, + "learning_rate": 1.3090648243208971e-06, + "loss": 0.5435, + "step": 13436 + }, + { + "epoch": 0.5544689279524635, + "grad_norm": 2.672582680050446, + "learning_rate": 1.3088659807878939e-06, + "loss": 0.5467, + "step": 13437 + }, + { + "epoch": 0.5545101922918214, + "grad_norm": 10.70398703299419, + "learning_rate": 1.3086671406690219e-06, + "loss": 0.542, + "step": 13438 + }, + { + "epoch": 0.5545514566311793, + "grad_norm": 3.6336950126599183, + "learning_rate": 1.308468303967834e-06, + "loss": 0.5209, + "step": 13439 + }, + { + "epoch": 0.5545927209705372, + "grad_norm": 3.05855781571408, + "learning_rate": 1.3082694706878812e-06, + "loss": 0.4912, + "step": 13440 + }, + { + "epoch": 0.5546339853098952, + "grad_norm": 8.698233117212611, + "learning_rate": 1.308070640832715e-06, + "loss": 0.4686, + "step": 13441 + }, + { + "epoch": 0.5546752496492531, + "grad_norm": 3.0475899038387486, + "learning_rate": 1.3078718144058876e-06, + "loss": 0.4824, + "step": 13442 + }, + { + "epoch": 0.554716513988611, + "grad_norm": 2.3383819726557373, + "learning_rate": 1.30767299141095e-06, + "loss": 0.493, + "step": 13443 + }, + { + "epoch": 0.554757778327969, + "grad_norm": 2.733176599379195, + "learning_rate": 1.307474171851454e-06, + "loss": 0.4977, + "step": 13444 + }, + { + "epoch": 0.5547990426673269, + "grad_norm": 2.65381591355308, + "learning_rate": 1.3072753557309507e-06, + "loss": 0.4958, + "step": 13445 + }, + { + "epoch": 0.5548403070066849, + "grad_norm": 10.44483006088483, + "learning_rate": 1.3070765430529922e-06, + "loss": 0.5105, + "step": 13446 + }, + { + "epoch": 0.5548815713460428, + "grad_norm": 3.476685340377823, + "learning_rate": 1.3068777338211287e-06, + "loss": 0.5349, + "step": 13447 + }, + { + "epoch": 0.5549228356854007, + "grad_norm": 345.84502299388123, + "learning_rate": 1.3066789280389126e-06, + "loss": 0.5634, + "step": 13448 + }, + { + "epoch": 0.5549641000247586, + "grad_norm": 2.9373722370439097, + "learning_rate": 1.3064801257098942e-06, + "loss": 0.5448, + "step": 13449 + }, + { + "epoch": 0.5550053643641165, + "grad_norm": 2.8249009160353973, + "learning_rate": 1.3062813268376246e-06, + "loss": 0.5417, + "step": 13450 + }, + { + "epoch": 0.5550466287034744, + "grad_norm": 2.549853583349709, + "learning_rate": 1.3060825314256556e-06, + "loss": 0.5102, + "step": 
13451 + }, + { + "epoch": 0.5550878930428323, + "grad_norm": 3.5530348041575515, + "learning_rate": 1.3058837394775371e-06, + "loss": 0.4635, + "step": 13452 + }, + { + "epoch": 0.5551291573821903, + "grad_norm": 4.272107565630823, + "learning_rate": 1.305684950996821e-06, + "loss": 0.4996, + "step": 13453 + }, + { + "epoch": 0.5551704217215483, + "grad_norm": 7.181722665306302, + "learning_rate": 1.3054861659870574e-06, + "loss": 0.4635, + "step": 13454 + }, + { + "epoch": 0.5552116860609062, + "grad_norm": 6.9426034128944805, + "learning_rate": 1.3052873844517978e-06, + "loss": 0.5132, + "step": 13455 + }, + { + "epoch": 0.5552529504002641, + "grad_norm": 2.7453370973915665, + "learning_rate": 1.3050886063945926e-06, + "loss": 0.4925, + "step": 13456 + }, + { + "epoch": 0.555294214739622, + "grad_norm": 4.582443751202806, + "learning_rate": 1.3048898318189926e-06, + "loss": 0.6026, + "step": 13457 + }, + { + "epoch": 0.55533547907898, + "grad_norm": 8.496961798980914, + "learning_rate": 1.3046910607285478e-06, + "loss": 0.5078, + "step": 13458 + }, + { + "epoch": 0.5553767434183379, + "grad_norm": 1.8462930442358834, + "learning_rate": 1.3044922931268098e-06, + "loss": 0.4747, + "step": 13459 + }, + { + "epoch": 0.5554180077576958, + "grad_norm": 3.11290063068314, + "learning_rate": 1.304293529017328e-06, + "loss": 0.5403, + "step": 13460 + }, + { + "epoch": 0.5554592720970537, + "grad_norm": 5.878029386164871, + "learning_rate": 1.3040947684036544e-06, + "loss": 0.5173, + "step": 13461 + }, + { + "epoch": 0.5555005364364116, + "grad_norm": 2.8494444220077257, + "learning_rate": 1.3038960112893375e-06, + "loss": 0.5386, + "step": 13462 + }, + { + "epoch": 0.5555418007757695, + "grad_norm": 2.95153939771784, + "learning_rate": 1.3036972576779285e-06, + "loss": 0.5165, + "step": 13463 + }, + { + "epoch": 0.5555830651151276, + "grad_norm": 3.3851428466305276, + "learning_rate": 1.3034985075729774e-06, + "loss": 0.5427, + "step": 13464 + }, + { + "epoch": 0.5556243294544855, + "grad_norm": 2.463715669954635, + "learning_rate": 1.303299760978035e-06, + "loss": 0.5404, + "step": 13465 + }, + { + "epoch": 0.5556655937938434, + "grad_norm": 12.23436199641814, + "learning_rate": 1.3031010178966503e-06, + "loss": 0.5123, + "step": 13466 + }, + { + "epoch": 0.5557068581332013, + "grad_norm": 2.84887922590287, + "learning_rate": 1.3029022783323745e-06, + "loss": 0.4863, + "step": 13467 + }, + { + "epoch": 0.5557481224725592, + "grad_norm": 2.6573740609024186, + "learning_rate": 1.3027035422887567e-06, + "loss": 0.5001, + "step": 13468 + }, + { + "epoch": 0.5557893868119171, + "grad_norm": 2.5110006172762604, + "learning_rate": 1.3025048097693475e-06, + "loss": 0.5387, + "step": 13469 + }, + { + "epoch": 0.5558306511512751, + "grad_norm": 7.457257044935202, + "learning_rate": 1.302306080777696e-06, + "loss": 0.5259, + "step": 13470 + }, + { + "epoch": 0.555871915490633, + "grad_norm": 3.4205745168200994, + "learning_rate": 1.302107355317353e-06, + "loss": 0.5191, + "step": 13471 + }, + { + "epoch": 0.5559131798299909, + "grad_norm": 2.3861460213818377, + "learning_rate": 1.3019086333918674e-06, + "loss": 0.503, + "step": 13472 + }, + { + "epoch": 0.5559544441693488, + "grad_norm": 3.405539500443724, + "learning_rate": 1.3017099150047892e-06, + "loss": 0.5176, + "step": 13473 + }, + { + "epoch": 0.5559957085087067, + "grad_norm": 2.7810007389355826, + "learning_rate": 1.3015112001596684e-06, + "loss": 0.5577, + "step": 13474 + }, + { + "epoch": 0.5560369728480647, + "grad_norm": 2.8468643665918396, + 
"learning_rate": 1.3013124888600535e-06, + "loss": 0.5348, + "step": 13475 + }, + { + "epoch": 0.5560782371874227, + "grad_norm": 2.864152847875347, + "learning_rate": 1.301113781109495e-06, + "loss": 0.4792, + "step": 13476 + }, + { + "epoch": 0.5561195015267806, + "grad_norm": 4.430745668534745, + "learning_rate": 1.3009150769115415e-06, + "loss": 0.5512, + "step": 13477 + }, + { + "epoch": 0.5561607658661385, + "grad_norm": 2.2353461708779205, + "learning_rate": 1.300716376269743e-06, + "loss": 0.5008, + "step": 13478 + }, + { + "epoch": 0.5562020302054964, + "grad_norm": 2.3433432833378545, + "learning_rate": 1.3005176791876484e-06, + "loss": 0.5471, + "step": 13479 + }, + { + "epoch": 0.5562432945448543, + "grad_norm": 2.805621233103783, + "learning_rate": 1.300318985668807e-06, + "loss": 0.5336, + "step": 13480 + }, + { + "epoch": 0.5562845588842122, + "grad_norm": 2.4055808698157874, + "learning_rate": 1.3001202957167687e-06, + "loss": 0.4744, + "step": 13481 + }, + { + "epoch": 0.5563258232235702, + "grad_norm": 3.217392949105979, + "learning_rate": 1.2999216093350811e-06, + "loss": 0.4933, + "step": 13482 + }, + { + "epoch": 0.5563670875629281, + "grad_norm": 97.42220362599515, + "learning_rate": 1.2997229265272945e-06, + "loss": 0.5458, + "step": 13483 + }, + { + "epoch": 0.556408351902286, + "grad_norm": 2.0793618172089103, + "learning_rate": 1.2995242472969572e-06, + "loss": 0.5177, + "step": 13484 + }, + { + "epoch": 0.556449616241644, + "grad_norm": 11.758945212969483, + "learning_rate": 1.2993255716476186e-06, + "loss": 0.5486, + "step": 13485 + }, + { + "epoch": 0.5564908805810019, + "grad_norm": 4.1161646457943, + "learning_rate": 1.2991268995828271e-06, + "loss": 0.5146, + "step": 13486 + }, + { + "epoch": 0.5565321449203599, + "grad_norm": 6.268633733983689, + "learning_rate": 1.2989282311061324e-06, + "loss": 0.5372, + "step": 13487 + }, + { + "epoch": 0.5565734092597178, + "grad_norm": 1.991640805795651, + "learning_rate": 1.298729566221082e-06, + "loss": 0.4659, + "step": 13488 + }, + { + "epoch": 0.5566146735990757, + "grad_norm": 5.8164981388524435, + "learning_rate": 1.2985309049312247e-06, + "loss": 0.5184, + "step": 13489 + }, + { + "epoch": 0.5566559379384336, + "grad_norm": 3.31125916288788, + "learning_rate": 1.2983322472401098e-06, + "loss": 0.5469, + "step": 13490 + }, + { + "epoch": 0.5566972022777915, + "grad_norm": 2.7725766941839316, + "learning_rate": 1.298133593151285e-06, + "loss": 0.538, + "step": 13491 + }, + { + "epoch": 0.5567384666171494, + "grad_norm": 12.814878123055289, + "learning_rate": 1.2979349426683e-06, + "loss": 0.5147, + "step": 13492 + }, + { + "epoch": 0.5567797309565073, + "grad_norm": 2.6061910135616775, + "learning_rate": 1.2977362957947017e-06, + "loss": 0.5166, + "step": 13493 + }, + { + "epoch": 0.5568209952958653, + "grad_norm": 3.152788397084301, + "learning_rate": 1.2975376525340397e-06, + "loss": 0.5317, + "step": 13494 + }, + { + "epoch": 0.5568622596352233, + "grad_norm": 3.3968237762880666, + "learning_rate": 1.2973390128898612e-06, + "loss": 0.5335, + "step": 13495 + }, + { + "epoch": 0.5569035239745812, + "grad_norm": 2.4875756200028603, + "learning_rate": 1.2971403768657153e-06, + "loss": 0.5253, + "step": 13496 + }, + { + "epoch": 0.5569447883139391, + "grad_norm": 5.547670507546469, + "learning_rate": 1.2969417444651492e-06, + "loss": 0.5832, + "step": 13497 + }, + { + "epoch": 0.556986052653297, + "grad_norm": 4.120478620030518, + "learning_rate": 1.2967431156917121e-06, + "loss": 0.5006, + "step": 13498 + }, + { + 
"epoch": 0.557027316992655, + "grad_norm": 3.020054376295778, + "learning_rate": 1.296544490548951e-06, + "loss": 0.4973, + "step": 13499 + }, + { + "epoch": 0.5570685813320129, + "grad_norm": 2.616773290345909, + "learning_rate": 1.2963458690404154e-06, + "loss": 0.5318, + "step": 13500 + }, + { + "epoch": 0.5571098456713708, + "grad_norm": 15.341487469603306, + "learning_rate": 1.296147251169651e-06, + "loss": 0.5686, + "step": 13501 + }, + { + "epoch": 0.5571511100107287, + "grad_norm": 4.6427256677985485, + "learning_rate": 1.2959486369402068e-06, + "loss": 0.5563, + "step": 13502 + }, + { + "epoch": 0.5571923743500866, + "grad_norm": 2.8384952697123578, + "learning_rate": 1.2957500263556303e-06, + "loss": 0.5757, + "step": 13503 + }, + { + "epoch": 0.5572336386894445, + "grad_norm": 5.442071319303675, + "learning_rate": 1.2955514194194696e-06, + "loss": 0.5026, + "step": 13504 + }, + { + "epoch": 0.5572749030288026, + "grad_norm": 2.3604368896377483, + "learning_rate": 1.2953528161352717e-06, + "loss": 0.5553, + "step": 13505 + }, + { + "epoch": 0.5573161673681605, + "grad_norm": 6.153300518013598, + "learning_rate": 1.2951542165065844e-06, + "loss": 0.5119, + "step": 13506 + }, + { + "epoch": 0.5573574317075184, + "grad_norm": 3.0742868450067387, + "learning_rate": 1.2949556205369554e-06, + "loss": 0.581, + "step": 13507 + }, + { + "epoch": 0.5573986960468763, + "grad_norm": 2.3151613683506462, + "learning_rate": 1.294757028229932e-06, + "loss": 0.5298, + "step": 13508 + }, + { + "epoch": 0.5574399603862342, + "grad_norm": 2.3392173351435037, + "learning_rate": 1.2945584395890612e-06, + "loss": 0.5547, + "step": 13509 + }, + { + "epoch": 0.5574812247255921, + "grad_norm": 3.2007791323594574, + "learning_rate": 1.2943598546178912e-06, + "loss": 0.4856, + "step": 13510 + }, + { + "epoch": 0.55752248906495, + "grad_norm": 2.4101970310686864, + "learning_rate": 1.294161273319968e-06, + "loss": 0.483, + "step": 13511 + }, + { + "epoch": 0.557563753404308, + "grad_norm": 3.0792056648719615, + "learning_rate": 1.2939626956988398e-06, + "loss": 0.5056, + "step": 13512 + }, + { + "epoch": 0.5576050177436659, + "grad_norm": 4.569754659633555, + "learning_rate": 1.293764121758053e-06, + "loss": 0.51, + "step": 13513 + }, + { + "epoch": 0.5576462820830238, + "grad_norm": 1.8665994185731216, + "learning_rate": 1.2935655515011554e-06, + "loss": 0.5274, + "step": 13514 + }, + { + "epoch": 0.5576875464223818, + "grad_norm": 5.866838451444828, + "learning_rate": 1.2933669849316933e-06, + "loss": 0.5665, + "step": 13515 + }, + { + "epoch": 0.5577288107617397, + "grad_norm": 2.306697337192561, + "learning_rate": 1.2931684220532135e-06, + "loss": 0.5115, + "step": 13516 + }, + { + "epoch": 0.5577700751010977, + "grad_norm": 2.9346622891991974, + "learning_rate": 1.2929698628692634e-06, + "loss": 0.5675, + "step": 13517 + }, + { + "epoch": 0.5578113394404556, + "grad_norm": 5.000757674024586, + "learning_rate": 1.2927713073833895e-06, + "loss": 0.5012, + "step": 13518 + }, + { + "epoch": 0.5578526037798135, + "grad_norm": 2.1842772840500895, + "learning_rate": 1.2925727555991378e-06, + "loss": 0.4844, + "step": 13519 + }, + { + "epoch": 0.5578938681191714, + "grad_norm": 3.4171098190636164, + "learning_rate": 1.2923742075200562e-06, + "loss": 0.5211, + "step": 13520 + }, + { + "epoch": 0.5579351324585293, + "grad_norm": 2.3864152575287223, + "learning_rate": 1.2921756631496902e-06, + "loss": 0.52, + "step": 13521 + }, + { + "epoch": 0.5579763967978872, + "grad_norm": 2.473007050343888, + "learning_rate": 
1.2919771224915873e-06, + "loss": 0.4729, + "step": 13522 + }, + { + "epoch": 0.5580176611372452, + "grad_norm": 3.2509514834516837, + "learning_rate": 1.2917785855492928e-06, + "loss": 0.5236, + "step": 13523 + }, + { + "epoch": 0.5580589254766031, + "grad_norm": 3.0508557775502623, + "learning_rate": 1.291580052326354e-06, + "loss": 0.4774, + "step": 13524 + }, + { + "epoch": 0.5581001898159611, + "grad_norm": 2.754954772624423, + "learning_rate": 1.2913815228263168e-06, + "loss": 0.49, + "step": 13525 + }, + { + "epoch": 0.558141454155319, + "grad_norm": 2.2720438389837048, + "learning_rate": 1.2911829970527277e-06, + "loss": 0.5035, + "step": 13526 + }, + { + "epoch": 0.5581827184946769, + "grad_norm": 4.949127427480827, + "learning_rate": 1.2909844750091328e-06, + "loss": 0.595, + "step": 13527 + }, + { + "epoch": 0.5582239828340348, + "grad_norm": 3.0464698964479937, + "learning_rate": 1.2907859566990775e-06, + "loss": 0.5418, + "step": 13528 + }, + { + "epoch": 0.5582652471733928, + "grad_norm": 3.037435141736014, + "learning_rate": 1.2905874421261088e-06, + "loss": 0.5612, + "step": 13529 + }, + { + "epoch": 0.5583065115127507, + "grad_norm": 4.838245194263671, + "learning_rate": 1.2903889312937717e-06, + "loss": 0.5426, + "step": 13530 + }, + { + "epoch": 0.5583477758521086, + "grad_norm": 2.0081877609257943, + "learning_rate": 1.2901904242056132e-06, + "loss": 0.5308, + "step": 13531 + }, + { + "epoch": 0.5583890401914665, + "grad_norm": 19.323523008805978, + "learning_rate": 1.2899919208651781e-06, + "loss": 0.5509, + "step": 13532 + }, + { + "epoch": 0.5584303045308244, + "grad_norm": 2.2863746149356707, + "learning_rate": 1.2897934212760132e-06, + "loss": 0.5226, + "step": 13533 + }, + { + "epoch": 0.5584715688701823, + "grad_norm": 2.5940079975116026, + "learning_rate": 1.2895949254416631e-06, + "loss": 0.4452, + "step": 13534 + }, + { + "epoch": 0.5585128332095403, + "grad_norm": 3.4778588542633075, + "learning_rate": 1.2893964333656745e-06, + "loss": 0.4615, + "step": 13535 + }, + { + "epoch": 0.5585540975488983, + "grad_norm": 3.2516930245479716, + "learning_rate": 1.2891979450515919e-06, + "loss": 0.5434, + "step": 13536 + }, + { + "epoch": 0.5585953618882562, + "grad_norm": 2.800690624330414, + "learning_rate": 1.2889994605029618e-06, + "loss": 0.5368, + "step": 13537 + }, + { + "epoch": 0.5586366262276141, + "grad_norm": 2.9644261779836083, + "learning_rate": 1.288800979723329e-06, + "loss": 0.524, + "step": 13538 + }, + { + "epoch": 0.558677890566972, + "grad_norm": 3.796599457070258, + "learning_rate": 1.288602502716239e-06, + "loss": 0.5223, + "step": 13539 + }, + { + "epoch": 0.55871915490633, + "grad_norm": 3.87692088986848, + "learning_rate": 1.2884040294852378e-06, + "loss": 0.547, + "step": 13540 + }, + { + "epoch": 0.5587604192456879, + "grad_norm": 2.8018214001947435, + "learning_rate": 1.2882055600338695e-06, + "loss": 0.5562, + "step": 13541 + }, + { + "epoch": 0.5588016835850458, + "grad_norm": 3.2666785951435093, + "learning_rate": 1.2880070943656795e-06, + "loss": 0.5629, + "step": 13542 + }, + { + "epoch": 0.5588429479244037, + "grad_norm": 1.8466507933563474, + "learning_rate": 1.2878086324842131e-06, + "loss": 0.4682, + "step": 13543 + }, + { + "epoch": 0.5588842122637616, + "grad_norm": 2.6900251074758383, + "learning_rate": 1.2876101743930155e-06, + "loss": 0.523, + "step": 13544 + }, + { + "epoch": 0.5589254766031195, + "grad_norm": 2.1092441151887558, + "learning_rate": 1.2874117200956316e-06, + "loss": 0.4995, + "step": 13545 + }, + { + "epoch": 
0.5589667409424776, + "grad_norm": 2.4723220267667556, + "learning_rate": 1.2872132695956057e-06, + "loss": 0.5378, + "step": 13546 + }, + { + "epoch": 0.5590080052818355, + "grad_norm": 1.8249108216955083, + "learning_rate": 1.2870148228964837e-06, + "loss": 0.4794, + "step": 13547 + }, + { + "epoch": 0.5590492696211934, + "grad_norm": 42.5453271471381, + "learning_rate": 1.2868163800018095e-06, + "loss": 0.4985, + "step": 13548 + }, + { + "epoch": 0.5590905339605513, + "grad_norm": 3.56392282210379, + "learning_rate": 1.2866179409151282e-06, + "loss": 0.4793, + "step": 13549 + }, + { + "epoch": 0.5591317982999092, + "grad_norm": 2.674530922553951, + "learning_rate": 1.2864195056399842e-06, + "loss": 0.5138, + "step": 13550 + }, + { + "epoch": 0.5591730626392671, + "grad_norm": 1.971366429293557, + "learning_rate": 1.2862210741799222e-06, + "loss": 0.5399, + "step": 13551 + }, + { + "epoch": 0.559214326978625, + "grad_norm": 2.1819332530146354, + "learning_rate": 1.2860226465384862e-06, + "loss": 0.5365, + "step": 13552 + }, + { + "epoch": 0.559255591317983, + "grad_norm": 2.1113525367618164, + "learning_rate": 1.2858242227192216e-06, + "loss": 0.522, + "step": 13553 + }, + { + "epoch": 0.5592968556573409, + "grad_norm": 2.3493762701988827, + "learning_rate": 1.2856258027256722e-06, + "loss": 0.6194, + "step": 13554 + }, + { + "epoch": 0.5593381199966988, + "grad_norm": 5.1735830511792935, + "learning_rate": 1.285427386561382e-06, + "loss": 0.4813, + "step": 13555 + }, + { + "epoch": 0.5593793843360568, + "grad_norm": 2.5266815132793154, + "learning_rate": 1.2852289742298954e-06, + "loss": 0.4708, + "step": 13556 + }, + { + "epoch": 0.5594206486754147, + "grad_norm": 3.850514031595986, + "learning_rate": 1.2850305657347568e-06, + "loss": 0.5596, + "step": 13557 + }, + { + "epoch": 0.5594619130147727, + "grad_norm": 4.293106457307282, + "learning_rate": 1.2848321610795096e-06, + "loss": 0.5869, + "step": 13558 + }, + { + "epoch": 0.5595031773541306, + "grad_norm": 2.028426841336985, + "learning_rate": 1.2846337602676986e-06, + "loss": 0.45, + "step": 13559 + }, + { + "epoch": 0.5595444416934885, + "grad_norm": 2.9174984674313627, + "learning_rate": 1.2844353633028672e-06, + "loss": 0.4863, + "step": 13560 + }, + { + "epoch": 0.5595857060328464, + "grad_norm": 2.909161562444541, + "learning_rate": 1.28423697018856e-06, + "loss": 0.5117, + "step": 13561 + }, + { + "epoch": 0.5596269703722043, + "grad_norm": 2.8105809764437613, + "learning_rate": 1.2840385809283196e-06, + "loss": 0.5237, + "step": 13562 + }, + { + "epoch": 0.5596682347115622, + "grad_norm": 9.87763493427542, + "learning_rate": 1.2838401955256908e-06, + "loss": 0.4943, + "step": 13563 + }, + { + "epoch": 0.5597094990509202, + "grad_norm": 2.5375657299644074, + "learning_rate": 1.2836418139842165e-06, + "loss": 0.48, + "step": 13564 + }, + { + "epoch": 0.5597507633902781, + "grad_norm": 2.458437960510282, + "learning_rate": 1.283443436307441e-06, + "loss": 0.487, + "step": 13565 + }, + { + "epoch": 0.5597920277296361, + "grad_norm": 3.242295379309294, + "learning_rate": 1.283245062498908e-06, + "loss": 0.5561, + "step": 13566 + }, + { + "epoch": 0.559833292068994, + "grad_norm": 2.643757457583533, + "learning_rate": 1.2830466925621595e-06, + "loss": 0.4923, + "step": 13567 + }, + { + "epoch": 0.5598745564083519, + "grad_norm": 3.0263374536961596, + "learning_rate": 1.28284832650074e-06, + "loss": 0.5324, + "step": 13568 + }, + { + "epoch": 0.5599158207477098, + "grad_norm": 25.14278524389892, + "learning_rate": 
1.2826499643181928e-06, + "loss": 0.5528, + "step": 13569 + }, + { + "epoch": 0.5599570850870678, + "grad_norm": 9.546683107507961, + "learning_rate": 1.2824516060180609e-06, + "loss": 0.5276, + "step": 13570 + }, + { + "epoch": 0.5599983494264257, + "grad_norm": 1.691636621774011, + "learning_rate": 1.2822532516038874e-06, + "loss": 0.4634, + "step": 13571 + }, + { + "epoch": 0.5600396137657836, + "grad_norm": 2.390449451134478, + "learning_rate": 1.282054901079216e-06, + "loss": 0.5503, + "step": 13572 + }, + { + "epoch": 0.5600808781051415, + "grad_norm": 3.6367098961858635, + "learning_rate": 1.2818565544475884e-06, + "loss": 0.5085, + "step": 13573 + }, + { + "epoch": 0.5601221424444994, + "grad_norm": 2.2215085192982134, + "learning_rate": 1.2816582117125492e-06, + "loss": 0.5862, + "step": 13574 + }, + { + "epoch": 0.5601634067838573, + "grad_norm": 2.5047243904893417, + "learning_rate": 1.2814598728776401e-06, + "loss": 0.5802, + "step": 13575 + }, + { + "epoch": 0.5602046711232154, + "grad_norm": 2.352466176129043, + "learning_rate": 1.2812615379464048e-06, + "loss": 0.5195, + "step": 13576 + }, + { + "epoch": 0.5602459354625733, + "grad_norm": 2.11365168020558, + "learning_rate": 1.2810632069223853e-06, + "loss": 0.5062, + "step": 13577 + }, + { + "epoch": 0.5602871998019312, + "grad_norm": 2.4543764651104008, + "learning_rate": 1.280864879809125e-06, + "loss": 0.5064, + "step": 13578 + }, + { + "epoch": 0.5603284641412891, + "grad_norm": 8.569230647076001, + "learning_rate": 1.2806665566101662e-06, + "loss": 0.5291, + "step": 13579 + }, + { + "epoch": 0.560369728480647, + "grad_norm": 2.4987634364919726, + "learning_rate": 1.2804682373290513e-06, + "loss": 0.5026, + "step": 13580 + }, + { + "epoch": 0.560410992820005, + "grad_norm": 2.0547128763588867, + "learning_rate": 1.2802699219693224e-06, + "loss": 0.5122, + "step": 13581 + }, + { + "epoch": 0.5604522571593629, + "grad_norm": 12.887130257478224, + "learning_rate": 1.2800716105345229e-06, + "loss": 0.4826, + "step": 13582 + }, + { + "epoch": 0.5604935214987208, + "grad_norm": 5.207173259806093, + "learning_rate": 1.279873303028194e-06, + "loss": 0.5045, + "step": 13583 + }, + { + "epoch": 0.5605347858380787, + "grad_norm": 3.112199004851054, + "learning_rate": 1.279674999453879e-06, + "loss": 0.5183, + "step": 13584 + }, + { + "epoch": 0.5605760501774366, + "grad_norm": 1.9605636727700093, + "learning_rate": 1.2794766998151196e-06, + "loss": 0.5059, + "step": 13585 + }, + { + "epoch": 0.5606173145167946, + "grad_norm": 2.18109177271667, + "learning_rate": 1.279278404115458e-06, + "loss": 0.5417, + "step": 13586 + }, + { + "epoch": 0.5606585788561526, + "grad_norm": 2.7342349548099443, + "learning_rate": 1.2790801123584362e-06, + "loss": 0.5727, + "step": 13587 + }, + { + "epoch": 0.5606998431955105, + "grad_norm": 2.2361003663783703, + "learning_rate": 1.2788818245475965e-06, + "loss": 0.54, + "step": 13588 + }, + { + "epoch": 0.5607411075348684, + "grad_norm": 2.6524442510897623, + "learning_rate": 1.2786835406864801e-06, + "loss": 0.5151, + "step": 13589 + }, + { + "epoch": 0.5607823718742263, + "grad_norm": 5.335772403064277, + "learning_rate": 1.2784852607786298e-06, + "loss": 0.5476, + "step": 13590 + }, + { + "epoch": 0.5608236362135842, + "grad_norm": 2.561206722562619, + "learning_rate": 1.2782869848275862e-06, + "loss": 0.5322, + "step": 13591 + }, + { + "epoch": 0.5608649005529421, + "grad_norm": 10.426056635954438, + "learning_rate": 1.278088712836893e-06, + "loss": 0.538, + "step": 13592 + }, + { + "epoch": 
0.5609061648923, + "grad_norm": 4.932015553903052, + "learning_rate": 1.2778904448100898e-06, + "loss": 0.4676, + "step": 13593 + }, + { + "epoch": 0.560947429231658, + "grad_norm": 2.5202102906051023, + "learning_rate": 1.2776921807507184e-06, + "loss": 0.4664, + "step": 13594 + }, + { + "epoch": 0.5609886935710159, + "grad_norm": 2.5116706250585628, + "learning_rate": 1.2774939206623215e-06, + "loss": 0.5264, + "step": 13595 + }, + { + "epoch": 0.5610299579103738, + "grad_norm": 21.74523505179791, + "learning_rate": 1.2772956645484394e-06, + "loss": 0.5186, + "step": 13596 + }, + { + "epoch": 0.5610712222497318, + "grad_norm": 4.49981953093033, + "learning_rate": 1.2770974124126138e-06, + "loss": 0.5165, + "step": 13597 + }, + { + "epoch": 0.5611124865890897, + "grad_norm": 3.075691736429768, + "learning_rate": 1.2768991642583863e-06, + "loss": 0.4807, + "step": 13598 + }, + { + "epoch": 0.5611537509284477, + "grad_norm": 4.415640694982318, + "learning_rate": 1.2767009200892975e-06, + "loss": 0.5487, + "step": 13599 + }, + { + "epoch": 0.5611950152678056, + "grad_norm": 2.635213594124913, + "learning_rate": 1.2765026799088893e-06, + "loss": 0.523, + "step": 13600 + }, + { + "epoch": 0.5612362796071635, + "grad_norm": 2.4256985089035754, + "learning_rate": 1.2763044437207019e-06, + "loss": 0.5396, + "step": 13601 + }, + { + "epoch": 0.5612775439465214, + "grad_norm": 4.013073640508233, + "learning_rate": 1.276106211528277e-06, + "loss": 0.5419, + "step": 13602 + }, + { + "epoch": 0.5613188082858793, + "grad_norm": 21.561763999747377, + "learning_rate": 1.275907983335155e-06, + "loss": 0.5159, + "step": 13603 + }, + { + "epoch": 0.5613600726252372, + "grad_norm": 4.001476692469464, + "learning_rate": 1.2757097591448774e-06, + "loss": 0.489, + "step": 13604 + }, + { + "epoch": 0.5614013369645952, + "grad_norm": 2.2781461389439635, + "learning_rate": 1.2755115389609841e-06, + "loss": 0.4763, + "step": 13605 + }, + { + "epoch": 0.5614426013039531, + "grad_norm": 3.3144190023923517, + "learning_rate": 1.2753133227870172e-06, + "loss": 0.5613, + "step": 13606 + }, + { + "epoch": 0.5614838656433111, + "grad_norm": 2.4687748286627773, + "learning_rate": 1.275115110626516e-06, + "loss": 0.4844, + "step": 13607 + }, + { + "epoch": 0.561525129982669, + "grad_norm": 2.250674404009411, + "learning_rate": 1.2749169024830211e-06, + "loss": 0.5215, + "step": 13608 + }, + { + "epoch": 0.5615663943220269, + "grad_norm": 3.065594305786111, + "learning_rate": 1.2747186983600738e-06, + "loss": 0.5223, + "step": 13609 + }, + { + "epoch": 0.5616076586613848, + "grad_norm": 3.2548330915953807, + "learning_rate": 1.2745204982612138e-06, + "loss": 0.502, + "step": 13610 + }, + { + "epoch": 0.5616489230007428, + "grad_norm": 18.32711013572451, + "learning_rate": 1.2743223021899821e-06, + "loss": 0.5689, + "step": 13611 + }, + { + "epoch": 0.5616901873401007, + "grad_norm": 15.776527038785302, + "learning_rate": 1.274124110149918e-06, + "loss": 0.5074, + "step": 13612 + }, + { + "epoch": 0.5617314516794586, + "grad_norm": 2.60304054134293, + "learning_rate": 1.2739259221445631e-06, + "loss": 0.5074, + "step": 13613 + }, + { + "epoch": 0.5617727160188165, + "grad_norm": 3.711474209850937, + "learning_rate": 1.2737277381774562e-06, + "loss": 0.5867, + "step": 13614 + }, + { + "epoch": 0.5618139803581744, + "grad_norm": 3.6118903908240365, + "learning_rate": 1.2735295582521387e-06, + "loss": 0.5485, + "step": 13615 + }, + { + "epoch": 0.5618552446975323, + "grad_norm": 6.579207511222821, + "learning_rate": 
1.2733313823721489e-06, + "loss": 0.5375, + "step": 13616 + }, + { + "epoch": 0.5618965090368904, + "grad_norm": 3.0460766899044405, + "learning_rate": 1.2731332105410282e-06, + "loss": 0.4868, + "step": 13617 + }, + { + "epoch": 0.5619377733762483, + "grad_norm": 2.2835344237439363, + "learning_rate": 1.272935042762316e-06, + "loss": 0.5319, + "step": 13618 + }, + { + "epoch": 0.5619790377156062, + "grad_norm": 2.4554911932327874, + "learning_rate": 1.2727368790395524e-06, + "loss": 0.5329, + "step": 13619 + }, + { + "epoch": 0.5620203020549641, + "grad_norm": 3.294713217933827, + "learning_rate": 1.2725387193762756e-06, + "loss": 0.5333, + "step": 13620 + }, + { + "epoch": 0.562061566394322, + "grad_norm": 6.427130412635936, + "learning_rate": 1.272340563776027e-06, + "loss": 0.5205, + "step": 13621 + }, + { + "epoch": 0.56210283073368, + "grad_norm": 3.9784628752341478, + "learning_rate": 1.272142412242345e-06, + "loss": 0.4797, + "step": 13622 + }, + { + "epoch": 0.5621440950730379, + "grad_norm": 6.298071210096439, + "learning_rate": 1.2719442647787698e-06, + "loss": 0.5174, + "step": 13623 + }, + { + "epoch": 0.5621853594123958, + "grad_norm": 2.181354831083436, + "learning_rate": 1.2717461213888404e-06, + "loss": 0.5332, + "step": 13624 + }, + { + "epoch": 0.5622266237517537, + "grad_norm": 2.9297160711068866, + "learning_rate": 1.2715479820760962e-06, + "loss": 0.4816, + "step": 13625 + }, + { + "epoch": 0.5622678880911116, + "grad_norm": 8.5704086764021, + "learning_rate": 1.2713498468440766e-06, + "loss": 0.4959, + "step": 13626 + }, + { + "epoch": 0.5623091524304696, + "grad_norm": 2.8944885793546153, + "learning_rate": 1.2711517156963209e-06, + "loss": 0.547, + "step": 13627 + }, + { + "epoch": 0.5623504167698276, + "grad_norm": 2.7521631431937457, + "learning_rate": 1.2709535886363678e-06, + "loss": 0.5002, + "step": 13628 + }, + { + "epoch": 0.5623916811091855, + "grad_norm": 3.955724144373979, + "learning_rate": 1.2707554656677568e-06, + "loss": 0.4877, + "step": 13629 + }, + { + "epoch": 0.5624329454485434, + "grad_norm": 2.71600194551934, + "learning_rate": 1.2705573467940265e-06, + "loss": 0.5245, + "step": 13630 + }, + { + "epoch": 0.5624742097879013, + "grad_norm": 2.3452816343685434, + "learning_rate": 1.2703592320187161e-06, + "loss": 0.5078, + "step": 13631 + }, + { + "epoch": 0.5625154741272592, + "grad_norm": 4.824644498314683, + "learning_rate": 1.2701611213453646e-06, + "loss": 0.5373, + "step": 13632 + }, + { + "epoch": 0.5625567384666171, + "grad_norm": 14.830007413378866, + "learning_rate": 1.2699630147775102e-06, + "loss": 0.5738, + "step": 13633 + }, + { + "epoch": 0.562598002805975, + "grad_norm": 3.2495886759070673, + "learning_rate": 1.2697649123186916e-06, + "loss": 0.5142, + "step": 13634 + }, + { + "epoch": 0.562639267145333, + "grad_norm": 2.797527952019211, + "learning_rate": 1.269566813972448e-06, + "loss": 0.5529, + "step": 13635 + }, + { + "epoch": 0.5626805314846909, + "grad_norm": 2.20500639221472, + "learning_rate": 1.269368719742317e-06, + "loss": 0.4861, + "step": 13636 + }, + { + "epoch": 0.5627217958240489, + "grad_norm": 1.603266705179224, + "learning_rate": 1.269170629631838e-06, + "loss": 0.4817, + "step": 13637 + }, + { + "epoch": 0.5627630601634068, + "grad_norm": 2.3332709654824626, + "learning_rate": 1.2689725436445487e-06, + "loss": 0.4823, + "step": 13638 + }, + { + "epoch": 0.5628043245027647, + "grad_norm": 2.434819734186904, + "learning_rate": 1.2687744617839883e-06, + "loss": 0.4836, + "step": 13639 + }, + { + "epoch": 
0.5628455888421227, + "grad_norm": 3.2163478798678655, + "learning_rate": 1.2685763840536937e-06, + "loss": 0.5632, + "step": 13640 + }, + { + "epoch": 0.5628868531814806, + "grad_norm": 3.5554662664001797, + "learning_rate": 1.2683783104572043e-06, + "loss": 0.5034, + "step": 13641 + }, + { + "epoch": 0.5629281175208385, + "grad_norm": 3.3114569421168896, + "learning_rate": 1.2681802409980575e-06, + "loss": 0.5369, + "step": 13642 + }, + { + "epoch": 0.5629693818601964, + "grad_norm": 2.1968447889094675, + "learning_rate": 1.2679821756797918e-06, + "loss": 0.5313, + "step": 13643 + }, + { + "epoch": 0.5630106461995543, + "grad_norm": 2.421331024341106, + "learning_rate": 1.2677841145059444e-06, + "loss": 0.5139, + "step": 13644 + }, + { + "epoch": 0.5630519105389122, + "grad_norm": 3.1673722639464157, + "learning_rate": 1.2675860574800543e-06, + "loss": 0.4835, + "step": 13645 + }, + { + "epoch": 0.5630931748782702, + "grad_norm": 2.199908938596461, + "learning_rate": 1.2673880046056584e-06, + "loss": 0.5031, + "step": 13646 + }, + { + "epoch": 0.5631344392176282, + "grad_norm": 3.8200629916650395, + "learning_rate": 1.2671899558862942e-06, + "loss": 0.5789, + "step": 13647 + }, + { + "epoch": 0.5631757035569861, + "grad_norm": 5.041018789783618, + "learning_rate": 1.2669919113255005e-06, + "loss": 0.5248, + "step": 13648 + }, + { + "epoch": 0.563216967896344, + "grad_norm": 3.961122961676829, + "learning_rate": 1.2667938709268134e-06, + "loss": 0.5423, + "step": 13649 + }, + { + "epoch": 0.5632582322357019, + "grad_norm": 4.146577951126964, + "learning_rate": 1.2665958346937717e-06, + "loss": 0.4929, + "step": 13650 + }, + { + "epoch": 0.5632994965750598, + "grad_norm": 2.6800259906883124, + "learning_rate": 1.2663978026299116e-06, + "loss": 0.6012, + "step": 13651 + }, + { + "epoch": 0.5633407609144178, + "grad_norm": 3.681225860923268, + "learning_rate": 1.2661997747387718e-06, + "loss": 0.4664, + "step": 13652 + }, + { + "epoch": 0.5633820252537757, + "grad_norm": 3.0804980647082263, + "learning_rate": 1.2660017510238886e-06, + "loss": 0.5491, + "step": 13653 + }, + { + "epoch": 0.5634232895931336, + "grad_norm": 2.6017283380507186, + "learning_rate": 1.2658037314887997e-06, + "loss": 0.526, + "step": 13654 + }, + { + "epoch": 0.5634645539324915, + "grad_norm": 2.1998353494021, + "learning_rate": 1.2656057161370417e-06, + "loss": 0.5125, + "step": 13655 + }, + { + "epoch": 0.5635058182718494, + "grad_norm": 2.4121732714581934, + "learning_rate": 1.2654077049721524e-06, + "loss": 0.5051, + "step": 13656 + }, + { + "epoch": 0.5635470826112073, + "grad_norm": 2.356285665275204, + "learning_rate": 1.2652096979976681e-06, + "loss": 0.5097, + "step": 13657 + }, + { + "epoch": 0.5635883469505654, + "grad_norm": 8.337389321971077, + "learning_rate": 1.2650116952171267e-06, + "loss": 0.4853, + "step": 13658 + }, + { + "epoch": 0.5636296112899233, + "grad_norm": 1.7803352664573022, + "learning_rate": 1.2648136966340633e-06, + "loss": 0.5637, + "step": 13659 + }, + { + "epoch": 0.5636708756292812, + "grad_norm": 2.7575271598288897, + "learning_rate": 1.264615702252016e-06, + "loss": 0.4483, + "step": 13660 + }, + { + "epoch": 0.5637121399686391, + "grad_norm": 2.9893351186333263, + "learning_rate": 1.2644177120745207e-06, + "loss": 0.5743, + "step": 13661 + }, + { + "epoch": 0.563753404307997, + "grad_norm": 7.703022921698869, + "learning_rate": 1.264219726105115e-06, + "loss": 0.5203, + "step": 13662 + }, + { + "epoch": 0.563794668647355, + "grad_norm": 2.889245425461844, + "learning_rate": 
1.2640217443473343e-06, + "loss": 0.5644, + "step": 13663 + }, + { + "epoch": 0.5638359329867129, + "grad_norm": 2.991954110310081, + "learning_rate": 1.263823766804716e-06, + "loss": 0.4982, + "step": 13664 + }, + { + "epoch": 0.5638771973260708, + "grad_norm": 2.251213117523682, + "learning_rate": 1.2636257934807955e-06, + "loss": 0.5131, + "step": 13665 + }, + { + "epoch": 0.5639184616654287, + "grad_norm": 2.7308171852335446, + "learning_rate": 1.2634278243791101e-06, + "loss": 0.5496, + "step": 13666 + }, + { + "epoch": 0.5639597260047866, + "grad_norm": 2.0964013782946846, + "learning_rate": 1.2632298595031952e-06, + "loss": 0.4648, + "step": 13667 + }, + { + "epoch": 0.5640009903441446, + "grad_norm": 3.3901490676606367, + "learning_rate": 1.2630318988565878e-06, + "loss": 0.5407, + "step": 13668 + }, + { + "epoch": 0.5640422546835026, + "grad_norm": 9.72284981979036, + "learning_rate": 1.262833942442823e-06, + "loss": 0.5042, + "step": 13669 + }, + { + "epoch": 0.5640835190228605, + "grad_norm": 1.8026307753616133, + "learning_rate": 1.2626359902654378e-06, + "loss": 0.5152, + "step": 13670 + }, + { + "epoch": 0.5641247833622184, + "grad_norm": 5.155731448938354, + "learning_rate": 1.2624380423279677e-06, + "loss": 0.4873, + "step": 13671 + }, + { + "epoch": 0.5641660477015763, + "grad_norm": 3.090043466420569, + "learning_rate": 1.2622400986339478e-06, + "loss": 0.5322, + "step": 13672 + }, + { + "epoch": 0.5642073120409342, + "grad_norm": 2.193034892727083, + "learning_rate": 1.262042159186915e-06, + "loss": 0.4837, + "step": 13673 + }, + { + "epoch": 0.5642485763802921, + "grad_norm": 11.24967797020441, + "learning_rate": 1.2618442239904047e-06, + "loss": 0.5248, + "step": 13674 + }, + { + "epoch": 0.56428984071965, + "grad_norm": 9.508440848194963, + "learning_rate": 1.2616462930479516e-06, + "loss": 0.4991, + "step": 13675 + }, + { + "epoch": 0.564331105059008, + "grad_norm": 2.0782340825560413, + "learning_rate": 1.2614483663630925e-06, + "loss": 0.5458, + "step": 13676 + }, + { + "epoch": 0.5643723693983659, + "grad_norm": 2.6642271889343085, + "learning_rate": 1.2612504439393619e-06, + "loss": 0.5702, + "step": 13677 + }, + { + "epoch": 0.5644136337377239, + "grad_norm": 3.2322917417755423, + "learning_rate": 1.2610525257802962e-06, + "loss": 0.5315, + "step": 13678 + }, + { + "epoch": 0.5644548980770818, + "grad_norm": 3.5667021979035822, + "learning_rate": 1.2608546118894298e-06, + "loss": 0.5123, + "step": 13679 + }, + { + "epoch": 0.5644961624164397, + "grad_norm": 3.21066044713295, + "learning_rate": 1.2606567022702983e-06, + "loss": 0.4853, + "step": 13680 + }, + { + "epoch": 0.5645374267557977, + "grad_norm": 2.248790637326735, + "learning_rate": 1.2604587969264366e-06, + "loss": 0.5259, + "step": 13681 + }, + { + "epoch": 0.5645786910951556, + "grad_norm": 4.897943912539106, + "learning_rate": 1.2602608958613806e-06, + "loss": 0.5153, + "step": 13682 + }, + { + "epoch": 0.5646199554345135, + "grad_norm": 2.132184206341918, + "learning_rate": 1.2600629990786643e-06, + "loss": 0.5437, + "step": 13683 + }, + { + "epoch": 0.5646612197738714, + "grad_norm": 4.674598144378476, + "learning_rate": 1.2598651065818236e-06, + "loss": 0.562, + "step": 13684 + }, + { + "epoch": 0.5647024841132293, + "grad_norm": 5.297260468658786, + "learning_rate": 1.2596672183743927e-06, + "loss": 0.5039, + "step": 13685 + }, + { + "epoch": 0.5647437484525872, + "grad_norm": 3.157101708554286, + "learning_rate": 1.259469334459906e-06, + "loss": 0.5159, + "step": 13686 + }, + { + "epoch": 
0.5647850127919452, + "grad_norm": 2.490195915048819, + "learning_rate": 1.259271454841899e-06, + "loss": 0.4577, + "step": 13687 + }, + { + "epoch": 0.5648262771313032, + "grad_norm": 4.7131306229655445, + "learning_rate": 1.2590735795239059e-06, + "loss": 0.5314, + "step": 13688 + }, + { + "epoch": 0.5648675414706611, + "grad_norm": 2.400906843044837, + "learning_rate": 1.2588757085094616e-06, + "loss": 0.4722, + "step": 13689 + }, + { + "epoch": 0.564908805810019, + "grad_norm": 7.113527867883462, + "learning_rate": 1.2586778418020999e-06, + "loss": 0.4779, + "step": 13690 + }, + { + "epoch": 0.5649500701493769, + "grad_norm": 2.7998344366929597, + "learning_rate": 1.258479979405356e-06, + "loss": 0.5295, + "step": 13691 + }, + { + "epoch": 0.5649913344887348, + "grad_norm": 2.4435613595940486, + "learning_rate": 1.2582821213227637e-06, + "loss": 0.5367, + "step": 13692 + }, + { + "epoch": 0.5650325988280928, + "grad_norm": 3.8723443029080644, + "learning_rate": 1.2580842675578577e-06, + "loss": 0.5393, + "step": 13693 + }, + { + "epoch": 0.5650738631674507, + "grad_norm": 5.250849474040864, + "learning_rate": 1.2578864181141713e-06, + "loss": 0.4851, + "step": 13694 + }, + { + "epoch": 0.5651151275068086, + "grad_norm": 4.9688586499835425, + "learning_rate": 1.2576885729952397e-06, + "loss": 0.546, + "step": 13695 + }, + { + "epoch": 0.5651563918461665, + "grad_norm": 7.039770765842633, + "learning_rate": 1.2574907322045962e-06, + "loss": 0.5446, + "step": 13696 + }, + { + "epoch": 0.5651976561855244, + "grad_norm": 1.851333407851463, + "learning_rate": 1.2572928957457748e-06, + "loss": 0.4628, + "step": 13697 + }, + { + "epoch": 0.5652389205248824, + "grad_norm": 2.0398997811792756, + "learning_rate": 1.2570950636223098e-06, + "loss": 0.5209, + "step": 13698 + }, + { + "epoch": 0.5652801848642404, + "grad_norm": 4.863699279617372, + "learning_rate": 1.2568972358377347e-06, + "loss": 0.5425, + "step": 13699 + }, + { + "epoch": 0.5653214492035983, + "grad_norm": 3.89856695295177, + "learning_rate": 1.2566994123955823e-06, + "loss": 0.4967, + "step": 13700 + }, + { + "epoch": 0.5653627135429562, + "grad_norm": 2.4384999125804137, + "learning_rate": 1.2565015932993877e-06, + "loss": 0.4815, + "step": 13701 + }, + { + "epoch": 0.5654039778823141, + "grad_norm": 3.7818890989710607, + "learning_rate": 1.2563037785526834e-06, + "loss": 0.5167, + "step": 13702 + }, + { + "epoch": 0.565445242221672, + "grad_norm": 2.5723731756936807, + "learning_rate": 1.2561059681590035e-06, + "loss": 0.565, + "step": 13703 + }, + { + "epoch": 0.56548650656103, + "grad_norm": 19.472102694467434, + "learning_rate": 1.2559081621218807e-06, + "loss": 0.5377, + "step": 13704 + }, + { + "epoch": 0.5655277709003879, + "grad_norm": 3.052445553196581, + "learning_rate": 1.2557103604448494e-06, + "loss": 0.495, + "step": 13705 + }, + { + "epoch": 0.5655690352397458, + "grad_norm": 3.6114536757811755, + "learning_rate": 1.2555125631314415e-06, + "loss": 0.4831, + "step": 13706 + }, + { + "epoch": 0.5656102995791037, + "grad_norm": 3.306169155876782, + "learning_rate": 1.2553147701851913e-06, + "loss": 0.5007, + "step": 13707 + }, + { + "epoch": 0.5656515639184617, + "grad_norm": 3.726687573959437, + "learning_rate": 1.2551169816096314e-06, + "loss": 0.4564, + "step": 13708 + }, + { + "epoch": 0.5656928282578196, + "grad_norm": 2.6054321760046952, + "learning_rate": 1.2549191974082948e-06, + "loss": 0.5969, + "step": 13709 + }, + { + "epoch": 0.5657340925971776, + "grad_norm": 5.6198181374265275, + "learning_rate": 
1.2547214175847142e-06, + "loss": 0.5177, + "step": 13710 + }, + { + "epoch": 0.5657753569365355, + "grad_norm": 3.466905588744136, + "learning_rate": 1.2545236421424233e-06, + "loss": 0.5383, + "step": 13711 + }, + { + "epoch": 0.5658166212758934, + "grad_norm": 10.216877229081357, + "learning_rate": 1.2543258710849537e-06, + "loss": 0.5749, + "step": 13712 + }, + { + "epoch": 0.5658578856152513, + "grad_norm": 1.839282776700341, + "learning_rate": 1.254128104415839e-06, + "loss": 0.5322, + "step": 13713 + }, + { + "epoch": 0.5658991499546092, + "grad_norm": 3.2173553031787856, + "learning_rate": 1.253930342138611e-06, + "loss": 0.4869, + "step": 13714 + }, + { + "epoch": 0.5659404142939671, + "grad_norm": 2.6372633729195876, + "learning_rate": 1.253732584256803e-06, + "loss": 0.5507, + "step": 13715 + }, + { + "epoch": 0.565981678633325, + "grad_norm": 2.8636530731676437, + "learning_rate": 1.2535348307739466e-06, + "loss": 0.4918, + "step": 13716 + }, + { + "epoch": 0.566022942972683, + "grad_norm": 2.408404732030385, + "learning_rate": 1.2533370816935752e-06, + "loss": 0.5311, + "step": 13717 + }, + { + "epoch": 0.5660642073120409, + "grad_norm": 2.0958814123887457, + "learning_rate": 1.25313933701922e-06, + "loss": 0.4517, + "step": 13718 + }, + { + "epoch": 0.5661054716513989, + "grad_norm": 2.4786921125868977, + "learning_rate": 1.2529415967544144e-06, + "loss": 0.5272, + "step": 13719 + }, + { + "epoch": 0.5661467359907568, + "grad_norm": 5.008951431572883, + "learning_rate": 1.2527438609026895e-06, + "loss": 0.5277, + "step": 13720 + }, + { + "epoch": 0.5661880003301147, + "grad_norm": 2.3974827691089167, + "learning_rate": 1.252546129467578e-06, + "loss": 0.5144, + "step": 13721 + }, + { + "epoch": 0.5662292646694727, + "grad_norm": 2.7779433981675665, + "learning_rate": 1.2523484024526115e-06, + "loss": 0.5118, + "step": 13722 + }, + { + "epoch": 0.5662705290088306, + "grad_norm": 7.775091266971078, + "learning_rate": 1.2521506798613222e-06, + "loss": 0.5421, + "step": 13723 + }, + { + "epoch": 0.5663117933481885, + "grad_norm": 4.285301235005525, + "learning_rate": 1.2519529616972423e-06, + "loss": 0.513, + "step": 13724 + }, + { + "epoch": 0.5663530576875464, + "grad_norm": 2.813072590884791, + "learning_rate": 1.2517552479639023e-06, + "loss": 0.5844, + "step": 13725 + }, + { + "epoch": 0.5663943220269043, + "grad_norm": 2.3083146892119397, + "learning_rate": 1.251557538664835e-06, + "loss": 0.5311, + "step": 13726 + }, + { + "epoch": 0.5664355863662622, + "grad_norm": 2.626018336970221, + "learning_rate": 1.2513598338035711e-06, + "loss": 0.5283, + "step": 13727 + }, + { + "epoch": 0.5664768507056201, + "grad_norm": 4.092168215004395, + "learning_rate": 1.251162133383643e-06, + "loss": 0.4915, + "step": 13728 + }, + { + "epoch": 0.5665181150449782, + "grad_norm": 2.801376189140017, + "learning_rate": 1.2509644374085812e-06, + "loss": 0.4685, + "step": 13729 + }, + { + "epoch": 0.5665593793843361, + "grad_norm": 2.7083552805882483, + "learning_rate": 1.2507667458819181e-06, + "loss": 0.593, + "step": 13730 + }, + { + "epoch": 0.566600643723694, + "grad_norm": 2.60285201743942, + "learning_rate": 1.250569058807184e-06, + "loss": 0.5036, + "step": 13731 + }, + { + "epoch": 0.5666419080630519, + "grad_norm": 3.0743291169631872, + "learning_rate": 1.2503713761879107e-06, + "loss": 0.5228, + "step": 13732 + }, + { + "epoch": 0.5666831724024098, + "grad_norm": 2.368716974793146, + "learning_rate": 1.2501736980276287e-06, + "loss": 0.5185, + "step": 13733 + }, + { + "epoch": 
0.5667244367417678, + "grad_norm": 2.6581929176929746, + "learning_rate": 1.24997602432987e-06, + "loss": 0.5332, + "step": 13734 + }, + { + "epoch": 0.5667657010811257, + "grad_norm": 2.147834190997374, + "learning_rate": 1.249778355098165e-06, + "loss": 0.4959, + "step": 13735 + }, + { + "epoch": 0.5668069654204836, + "grad_norm": 2.9686094834560457, + "learning_rate": 1.2495806903360441e-06, + "loss": 0.56, + "step": 13736 + }, + { + "epoch": 0.5668482297598415, + "grad_norm": 6.562541889057641, + "learning_rate": 1.249383030047039e-06, + "loss": 0.4831, + "step": 13737 + }, + { + "epoch": 0.5668894940991994, + "grad_norm": 4.421094041569538, + "learning_rate": 1.2491853742346797e-06, + "loss": 0.5864, + "step": 13738 + }, + { + "epoch": 0.5669307584385574, + "grad_norm": 4.993373734288077, + "learning_rate": 1.248987722902497e-06, + "loss": 0.5321, + "step": 13739 + }, + { + "epoch": 0.5669720227779154, + "grad_norm": 6.562938761346875, + "learning_rate": 1.2487900760540217e-06, + "loss": 0.548, + "step": 13740 + }, + { + "epoch": 0.5670132871172733, + "grad_norm": 2.482185335552566, + "learning_rate": 1.2485924336927836e-06, + "loss": 0.4732, + "step": 13741 + }, + { + "epoch": 0.5670545514566312, + "grad_norm": 8.118009345574608, + "learning_rate": 1.248394795822314e-06, + "loss": 0.481, + "step": 13742 + }, + { + "epoch": 0.5670958157959891, + "grad_norm": 2.450880858385921, + "learning_rate": 1.2481971624461425e-06, + "loss": 0.5207, + "step": 13743 + }, + { + "epoch": 0.567137080135347, + "grad_norm": 2.135201008283818, + "learning_rate": 1.2479995335677997e-06, + "loss": 0.4349, + "step": 13744 + }, + { + "epoch": 0.5671783444747049, + "grad_norm": 4.101576791075447, + "learning_rate": 1.2478019091908155e-06, + "loss": 0.5112, + "step": 13745 + }, + { + "epoch": 0.5672196088140629, + "grad_norm": 2.464195640882767, + "learning_rate": 1.2476042893187202e-06, + "loss": 0.5393, + "step": 13746 + }, + { + "epoch": 0.5672608731534208, + "grad_norm": 2.1756961986068433, + "learning_rate": 1.2474066739550437e-06, + "loss": 0.5237, + "step": 13747 + }, + { + "epoch": 0.5673021374927787, + "grad_norm": 2.276068159980543, + "learning_rate": 1.247209063103316e-06, + "loss": 0.515, + "step": 13748 + }, + { + "epoch": 0.5673434018321367, + "grad_norm": 3.023987754571447, + "learning_rate": 1.2470114567670664e-06, + "loss": 0.5241, + "step": 13749 + }, + { + "epoch": 0.5673846661714946, + "grad_norm": 2.601746367747877, + "learning_rate": 1.246813854949826e-06, + "loss": 0.5426, + "step": 13750 + }, + { + "epoch": 0.5674259305108525, + "grad_norm": 65.71235737144146, + "learning_rate": 1.246616257655123e-06, + "loss": 0.5089, + "step": 13751 + }, + { + "epoch": 0.5674671948502105, + "grad_norm": 2.6581758380695426, + "learning_rate": 1.2464186648864875e-06, + "loss": 0.4887, + "step": 13752 + }, + { + "epoch": 0.5675084591895684, + "grad_norm": 2.4978185445770578, + "learning_rate": 1.2462210766474487e-06, + "loss": 0.5374, + "step": 13753 + }, + { + "epoch": 0.5675497235289263, + "grad_norm": 10.047920979030074, + "learning_rate": 1.246023492941537e-06, + "loss": 0.541, + "step": 13754 + }, + { + "epoch": 0.5675909878682842, + "grad_norm": 3.8577317566913645, + "learning_rate": 1.2458259137722803e-06, + "loss": 0.5248, + "step": 13755 + }, + { + "epoch": 0.5676322522076421, + "grad_norm": 2.8748941404251527, + "learning_rate": 1.245628339143209e-06, + "loss": 0.4952, + "step": 13756 + }, + { + "epoch": 0.567673516547, + "grad_norm": 3.214451638846046, + "learning_rate": 
1.2454307690578516e-06, + "loss": 0.5234, + "step": 13757 + }, + { + "epoch": 0.567714780886358, + "grad_norm": 20.499345206917734, + "learning_rate": 1.2452332035197382e-06, + "loss": 0.4913, + "step": 13758 + }, + { + "epoch": 0.567756045225716, + "grad_norm": 3.006827720737416, + "learning_rate": 1.2450356425323963e-06, + "loss": 0.5061, + "step": 13759 + }, + { + "epoch": 0.5677973095650739, + "grad_norm": 5.4042284478301, + "learning_rate": 1.2448380860993561e-06, + "loss": 0.5264, + "step": 13760 + }, + { + "epoch": 0.5678385739044318, + "grad_norm": 2.072668632290837, + "learning_rate": 1.2446405342241459e-06, + "loss": 0.523, + "step": 13761 + }, + { + "epoch": 0.5678798382437897, + "grad_norm": 2.2296541994166885, + "learning_rate": 1.2444429869102946e-06, + "loss": 0.5436, + "step": 13762 + }, + { + "epoch": 0.5679211025831477, + "grad_norm": 2.6962875743255146, + "learning_rate": 1.2442454441613314e-06, + "loss": 0.5437, + "step": 13763 + }, + { + "epoch": 0.5679623669225056, + "grad_norm": 2.5235864495793554, + "learning_rate": 1.2440479059807836e-06, + "loss": 0.4995, + "step": 13764 + }, + { + "epoch": 0.5680036312618635, + "grad_norm": 2.407301497807447, + "learning_rate": 1.2438503723721808e-06, + "loss": 0.473, + "step": 13765 + }, + { + "epoch": 0.5680448956012214, + "grad_norm": 3.70635875700135, + "learning_rate": 1.2436528433390508e-06, + "loss": 0.4946, + "step": 13766 + }, + { + "epoch": 0.5680861599405793, + "grad_norm": 6.534836769345253, + "learning_rate": 1.2434553188849228e-06, + "loss": 0.5014, + "step": 13767 + }, + { + "epoch": 0.5681274242799372, + "grad_norm": 2.3050250301251864, + "learning_rate": 1.2432577990133242e-06, + "loss": 0.4951, + "step": 13768 + }, + { + "epoch": 0.5681686886192953, + "grad_norm": 2.0593897959131393, + "learning_rate": 1.2430602837277837e-06, + "loss": 0.5343, + "step": 13769 + }, + { + "epoch": 0.5682099529586532, + "grad_norm": 5.892296606908685, + "learning_rate": 1.2428627730318293e-06, + "loss": 0.5442, + "step": 13770 + }, + { + "epoch": 0.5682512172980111, + "grad_norm": 2.2843745906692687, + "learning_rate": 1.242665266928989e-06, + "loss": 0.4832, + "step": 13771 + }, + { + "epoch": 0.568292481637369, + "grad_norm": 2.9137641119106736, + "learning_rate": 1.2424677654227906e-06, + "loss": 0.5559, + "step": 13772 + }, + { + "epoch": 0.5683337459767269, + "grad_norm": 4.501889895687974, + "learning_rate": 1.2422702685167628e-06, + "loss": 0.559, + "step": 13773 + }, + { + "epoch": 0.5683750103160848, + "grad_norm": 2.37412706538294, + "learning_rate": 1.2420727762144322e-06, + "loss": 0.5306, + "step": 13774 + }, + { + "epoch": 0.5684162746554428, + "grad_norm": 1.8885321242652737, + "learning_rate": 1.2418752885193272e-06, + "loss": 0.4999, + "step": 13775 + }, + { + "epoch": 0.5684575389948007, + "grad_norm": 3.490153517923185, + "learning_rate": 1.2416778054349757e-06, + "loss": 0.5574, + "step": 13776 + }, + { + "epoch": 0.5684988033341586, + "grad_norm": 2.325951076977662, + "learning_rate": 1.2414803269649045e-06, + "loss": 0.5428, + "step": 13777 + }, + { + "epoch": 0.5685400676735165, + "grad_norm": 2.7877516820531762, + "learning_rate": 1.2412828531126408e-06, + "loss": 0.551, + "step": 13778 + }, + { + "epoch": 0.5685813320128744, + "grad_norm": 2.087656438476244, + "learning_rate": 1.2410853838817133e-06, + "loss": 0.6025, + "step": 13779 + }, + { + "epoch": 0.5686225963522324, + "grad_norm": 2.981478835636354, + "learning_rate": 1.240887919275648e-06, + "loss": 0.5414, + "step": 13780 + }, + { + "epoch": 
0.5686638606915904, + "grad_norm": 2.232065082036156, + "learning_rate": 1.2406904592979731e-06, + "loss": 0.548, + "step": 13781 + }, + { + "epoch": 0.5687051250309483, + "grad_norm": 2.6564492379130416, + "learning_rate": 1.240493003952215e-06, + "loss": 0.514, + "step": 13782 + }, + { + "epoch": 0.5687463893703062, + "grad_norm": 2.730276390735203, + "learning_rate": 1.2402955532419013e-06, + "loss": 0.5486, + "step": 13783 + }, + { + "epoch": 0.5687876537096641, + "grad_norm": 1.975206001844339, + "learning_rate": 1.2400981071705584e-06, + "loss": 0.4875, + "step": 13784 + }, + { + "epoch": 0.568828918049022, + "grad_norm": 2.8011746881797754, + "learning_rate": 1.2399006657417137e-06, + "loss": 0.5349, + "step": 13785 + }, + { + "epoch": 0.5688701823883799, + "grad_norm": 2.4184597164591124, + "learning_rate": 1.2397032289588934e-06, + "loss": 0.531, + "step": 13786 + }, + { + "epoch": 0.5689114467277379, + "grad_norm": 2.634896206792186, + "learning_rate": 1.239505796825625e-06, + "loss": 0.5361, + "step": 13787 + }, + { + "epoch": 0.5689527110670958, + "grad_norm": 2.495686842140656, + "learning_rate": 1.2393083693454346e-06, + "loss": 0.5403, + "step": 13788 + }, + { + "epoch": 0.5689939754064537, + "grad_norm": 3.274354957115989, + "learning_rate": 1.239110946521849e-06, + "loss": 0.5252, + "step": 13789 + }, + { + "epoch": 0.5690352397458117, + "grad_norm": 2.1962464019962904, + "learning_rate": 1.2389135283583947e-06, + "loss": 0.564, + "step": 13790 + }, + { + "epoch": 0.5690765040851696, + "grad_norm": 3.7815495281514497, + "learning_rate": 1.238716114858598e-06, + "loss": 0.4965, + "step": 13791 + }, + { + "epoch": 0.5691177684245275, + "grad_norm": 1.867902684642884, + "learning_rate": 1.2385187060259846e-06, + "loss": 0.5157, + "step": 13792 + }, + { + "epoch": 0.5691590327638855, + "grad_norm": 4.416306740578636, + "learning_rate": 1.2383213018640815e-06, + "loss": 0.5916, + "step": 13793 + }, + { + "epoch": 0.5692002971032434, + "grad_norm": 4.041511026843072, + "learning_rate": 1.2381239023764144e-06, + "loss": 0.4578, + "step": 13794 + }, + { + "epoch": 0.5692415614426013, + "grad_norm": 3.614048863355574, + "learning_rate": 1.2379265075665098e-06, + "loss": 0.5196, + "step": 13795 + }, + { + "epoch": 0.5692828257819592, + "grad_norm": 15.573460508573907, + "learning_rate": 1.2377291174378928e-06, + "loss": 0.5304, + "step": 13796 + }, + { + "epoch": 0.5693240901213171, + "grad_norm": 2.966430340767752, + "learning_rate": 1.2375317319940904e-06, + "loss": 0.5453, + "step": 13797 + }, + { + "epoch": 0.569365354460675, + "grad_norm": 3.493461895840113, + "learning_rate": 1.2373343512386276e-06, + "loss": 0.4882, + "step": 13798 + }, + { + "epoch": 0.569406618800033, + "grad_norm": 2.415213423749607, + "learning_rate": 1.2371369751750306e-06, + "loss": 0.5294, + "step": 13799 + }, + { + "epoch": 0.569447883139391, + "grad_norm": 2.2850901697056587, + "learning_rate": 1.2369396038068245e-06, + "loss": 0.5458, + "step": 13800 + }, + { + "epoch": 0.5694891474787489, + "grad_norm": 3.8670699751170035, + "learning_rate": 1.2367422371375354e-06, + "loss": 0.5636, + "step": 13801 + }, + { + "epoch": 0.5695304118181068, + "grad_norm": 2.5184964621978954, + "learning_rate": 1.2365448751706883e-06, + "loss": 0.5664, + "step": 13802 + }, + { + "epoch": 0.5695716761574647, + "grad_norm": 3.5418212659864006, + "learning_rate": 1.2363475179098093e-06, + "loss": 0.5341, + "step": 13803 + }, + { + "epoch": 0.5696129404968227, + "grad_norm": 2.1731499176114366, + "learning_rate": 
1.2361501653584231e-06, + "loss": 0.5495, + "step": 13804 + }, + { + "epoch": 0.5696542048361806, + "grad_norm": 3.2343987823038596, + "learning_rate": 1.2359528175200547e-06, + "loss": 0.525, + "step": 13805 + }, + { + "epoch": 0.5696954691755385, + "grad_norm": 4.231314508492897, + "learning_rate": 1.2357554743982295e-06, + "loss": 0.559, + "step": 13806 + }, + { + "epoch": 0.5697367335148964, + "grad_norm": 2.7789044575203694, + "learning_rate": 1.2355581359964724e-06, + "loss": 0.4889, + "step": 13807 + }, + { + "epoch": 0.5697779978542543, + "grad_norm": 4.38112331441233, + "learning_rate": 1.235360802318309e-06, + "loss": 0.5557, + "step": 13808 + }, + { + "epoch": 0.5698192621936122, + "grad_norm": 3.660291852436655, + "learning_rate": 1.2351634733672633e-06, + "loss": 0.5353, + "step": 13809 + }, + { + "epoch": 0.5698605265329703, + "grad_norm": 2.6320555236120424, + "learning_rate": 1.2349661491468606e-06, + "loss": 0.5063, + "step": 13810 + }, + { + "epoch": 0.5699017908723282, + "grad_norm": 4.546295592861789, + "learning_rate": 1.2347688296606253e-06, + "loss": 0.5258, + "step": 13811 + }, + { + "epoch": 0.5699430552116861, + "grad_norm": 2.221868439783712, + "learning_rate": 1.2345715149120828e-06, + "loss": 0.5299, + "step": 13812 + }, + { + "epoch": 0.569984319551044, + "grad_norm": 2.792451143547971, + "learning_rate": 1.2343742049047566e-06, + "loss": 0.5358, + "step": 13813 + }, + { + "epoch": 0.5700255838904019, + "grad_norm": 2.3702234340347523, + "learning_rate": 1.2341768996421716e-06, + "loss": 0.5499, + "step": 13814 + }, + { + "epoch": 0.5700668482297598, + "grad_norm": 3.53989080046477, + "learning_rate": 1.2339795991278524e-06, + "loss": 0.5001, + "step": 13815 + }, + { + "epoch": 0.5701081125691178, + "grad_norm": 3.935655120627982, + "learning_rate": 1.2337823033653232e-06, + "loss": 0.542, + "step": 13816 + }, + { + "epoch": 0.5701493769084757, + "grad_norm": 2.5057864817646753, + "learning_rate": 1.2335850123581075e-06, + "loss": 0.4741, + "step": 13817 + }, + { + "epoch": 0.5701906412478336, + "grad_norm": 6.462609800589049, + "learning_rate": 1.2333877261097302e-06, + "loss": 0.6077, + "step": 13818 + }, + { + "epoch": 0.5702319055871915, + "grad_norm": 5.08783161920965, + "learning_rate": 1.233190444623715e-06, + "loss": 0.4983, + "step": 13819 + }, + { + "epoch": 0.5702731699265495, + "grad_norm": 3.335253572761358, + "learning_rate": 1.2329931679035861e-06, + "loss": 0.4912, + "step": 13820 + }, + { + "epoch": 0.5703144342659074, + "grad_norm": 4.3780653759681725, + "learning_rate": 1.2327958959528669e-06, + "loss": 0.5476, + "step": 13821 + }, + { + "epoch": 0.5703556986052654, + "grad_norm": 2.4338695009966234, + "learning_rate": 1.2325986287750815e-06, + "loss": 0.5224, + "step": 13822 + }, + { + "epoch": 0.5703969629446233, + "grad_norm": 11.063147535943644, + "learning_rate": 1.2324013663737534e-06, + "loss": 0.5886, + "step": 13823 + }, + { + "epoch": 0.5704382272839812, + "grad_norm": 4.085412021311586, + "learning_rate": 1.2322041087524068e-06, + "loss": 0.5557, + "step": 13824 + }, + { + "epoch": 0.5704794916233391, + "grad_norm": 15.203107836530545, + "learning_rate": 1.2320068559145641e-06, + "loss": 0.4958, + "step": 13825 + }, + { + "epoch": 0.570520755962697, + "grad_norm": 3.1462836059713775, + "learning_rate": 1.2318096078637498e-06, + "loss": 0.5149, + "step": 13826 + }, + { + "epoch": 0.5705620203020549, + "grad_norm": 2.0782001106743633, + "learning_rate": 1.2316123646034868e-06, + "loss": 0.5228, + "step": 13827 + }, + { + "epoch": 
0.5706032846414129, + "grad_norm": 2.5034988366383244, + "learning_rate": 1.2314151261372983e-06, + "loss": 0.503, + "step": 13828 + }, + { + "epoch": 0.5706445489807708, + "grad_norm": 12.031271573176754, + "learning_rate": 1.231217892468708e-06, + "loss": 0.5436, + "step": 13829 + }, + { + "epoch": 0.5706858133201288, + "grad_norm": 3.4953570207722184, + "learning_rate": 1.231020663601238e-06, + "loss": 0.4577, + "step": 13830 + }, + { + "epoch": 0.5707270776594867, + "grad_norm": 2.5800681850404477, + "learning_rate": 1.230823439538412e-06, + "loss": 0.5496, + "step": 13831 + }, + { + "epoch": 0.5707683419988446, + "grad_norm": 6.711809803156918, + "learning_rate": 1.2306262202837526e-06, + "loss": 0.5814, + "step": 13832 + }, + { + "epoch": 0.5708096063382025, + "grad_norm": 8.208611010212165, + "learning_rate": 1.2304290058407827e-06, + "loss": 0.4779, + "step": 13833 + }, + { + "epoch": 0.5708508706775605, + "grad_norm": 3.7676229781635455, + "learning_rate": 1.2302317962130254e-06, + "loss": 0.5176, + "step": 13834 + }, + { + "epoch": 0.5708921350169184, + "grad_norm": 3.6356893185167314, + "learning_rate": 1.2300345914040026e-06, + "loss": 0.5291, + "step": 13835 + }, + { + "epoch": 0.5709333993562763, + "grad_norm": 6.315418499029298, + "learning_rate": 1.229837391417238e-06, + "loss": 0.5301, + "step": 13836 + }, + { + "epoch": 0.5709746636956342, + "grad_norm": 2.1839391669048442, + "learning_rate": 1.2296401962562528e-06, + "loss": 0.4872, + "step": 13837 + }, + { + "epoch": 0.5710159280349921, + "grad_norm": 3.054000494305303, + "learning_rate": 1.2294430059245706e-06, + "loss": 0.5003, + "step": 13838 + }, + { + "epoch": 0.57105719237435, + "grad_norm": 3.093962379080163, + "learning_rate": 1.2292458204257126e-06, + "loss": 0.5195, + "step": 13839 + }, + { + "epoch": 0.571098456713708, + "grad_norm": 2.340936078002463, + "learning_rate": 1.229048639763202e-06, + "loss": 0.5429, + "step": 13840 + }, + { + "epoch": 0.571139721053066, + "grad_norm": 3.0138204622045057, + "learning_rate": 1.2288514639405602e-06, + "loss": 0.5487, + "step": 13841 + }, + { + "epoch": 0.5711809853924239, + "grad_norm": 3.391668922301844, + "learning_rate": 1.22865429296131e-06, + "loss": 0.5707, + "step": 13842 + }, + { + "epoch": 0.5712222497317818, + "grad_norm": 1.7917099435879886, + "learning_rate": 1.228457126828973e-06, + "loss": 0.5289, + "step": 13843 + }, + { + "epoch": 0.5712635140711397, + "grad_norm": 5.31246838024202, + "learning_rate": 1.2282599655470703e-06, + "loss": 0.5245, + "step": 13844 + }, + { + "epoch": 0.5713047784104976, + "grad_norm": 2.2129091628035917, + "learning_rate": 1.228062809119125e-06, + "loss": 0.5401, + "step": 13845 + }, + { + "epoch": 0.5713460427498556, + "grad_norm": 2.5391902389347507, + "learning_rate": 1.2278656575486577e-06, + "loss": 0.5459, + "step": 13846 + }, + { + "epoch": 0.5713873070892135, + "grad_norm": 2.033631950552594, + "learning_rate": 1.227668510839191e-06, + "loss": 0.5686, + "step": 13847 + }, + { + "epoch": 0.5714285714285714, + "grad_norm": 2.679670942672661, + "learning_rate": 1.2274713689942456e-06, + "loss": 0.4813, + "step": 13848 + }, + { + "epoch": 0.5714698357679293, + "grad_norm": 2.976278937415261, + "learning_rate": 1.2272742320173436e-06, + "loss": 0.4672, + "step": 13849 + }, + { + "epoch": 0.5715111001072872, + "grad_norm": 27.325842478617183, + "learning_rate": 1.227077099912006e-06, + "loss": 0.5048, + "step": 13850 + }, + { + "epoch": 0.5715523644466453, + "grad_norm": 3.410223412128195, + "learning_rate": 
1.226879972681754e-06, + "loss": 0.4994, + "step": 13851 + }, + { + "epoch": 0.5715936287860032, + "grad_norm": 8.39878123599345, + "learning_rate": 1.2266828503301094e-06, + "loss": 0.5358, + "step": 13852 + }, + { + "epoch": 0.5716348931253611, + "grad_norm": 3.861543122729267, + "learning_rate": 1.2264857328605922e-06, + "loss": 0.5457, + "step": 13853 + }, + { + "epoch": 0.571676157464719, + "grad_norm": 3.233719082555198, + "learning_rate": 1.2262886202767244e-06, + "loss": 0.545, + "step": 13854 + }, + { + "epoch": 0.5717174218040769, + "grad_norm": 1.8523401826388761, + "learning_rate": 1.2260915125820267e-06, + "loss": 0.5362, + "step": 13855 + }, + { + "epoch": 0.5717586861434348, + "grad_norm": 18.41756035014513, + "learning_rate": 1.2258944097800194e-06, + "loss": 0.5139, + "step": 13856 + }, + { + "epoch": 0.5717999504827928, + "grad_norm": 3.0922259816520796, + "learning_rate": 1.225697311874224e-06, + "loss": 0.5468, + "step": 13857 + }, + { + "epoch": 0.5718412148221507, + "grad_norm": 3.2085096868579637, + "learning_rate": 1.2255002188681606e-06, + "loss": 0.5696, + "step": 13858 + }, + { + "epoch": 0.5718824791615086, + "grad_norm": 2.6015928332700224, + "learning_rate": 1.22530313076535e-06, + "loss": 0.5408, + "step": 13859 + }, + { + "epoch": 0.5719237435008665, + "grad_norm": 3.7906046429182627, + "learning_rate": 1.2251060475693124e-06, + "loss": 0.5012, + "step": 13860 + }, + { + "epoch": 0.5719650078402245, + "grad_norm": 4.294922833307127, + "learning_rate": 1.224908969283569e-06, + "loss": 0.5111, + "step": 13861 + }, + { + "epoch": 0.5720062721795824, + "grad_norm": 3.463313138559361, + "learning_rate": 1.224711895911639e-06, + "loss": 0.553, + "step": 13862 + }, + { + "epoch": 0.5720475365189404, + "grad_norm": 2.7336448133105744, + "learning_rate": 1.224514827457044e-06, + "loss": 0.4956, + "step": 13863 + }, + { + "epoch": 0.5720888008582983, + "grad_norm": 3.1311490868303835, + "learning_rate": 1.2243177639233024e-06, + "loss": 0.4924, + "step": 13864 + }, + { + "epoch": 0.5721300651976562, + "grad_norm": 2.486441527951696, + "learning_rate": 1.224120705313936e-06, + "loss": 0.4789, + "step": 13865 + }, + { + "epoch": 0.5721713295370141, + "grad_norm": 2.851006968291944, + "learning_rate": 1.2239236516324634e-06, + "loss": 0.4895, + "step": 13866 + }, + { + "epoch": 0.572212593876372, + "grad_norm": 3.2833131801132276, + "learning_rate": 1.2237266028824052e-06, + "loss": 0.5262, + "step": 13867 + }, + { + "epoch": 0.5722538582157299, + "grad_norm": 3.805595278335352, + "learning_rate": 1.2235295590672815e-06, + "loss": 0.4851, + "step": 13868 + }, + { + "epoch": 0.5722951225550879, + "grad_norm": 2.3141875955079625, + "learning_rate": 1.2233325201906112e-06, + "loss": 0.4789, + "step": 13869 + }, + { + "epoch": 0.5723363868944458, + "grad_norm": 22.671861270142283, + "learning_rate": 1.2231354862559138e-06, + "loss": 0.4446, + "step": 13870 + }, + { + "epoch": 0.5723776512338038, + "grad_norm": 2.914747700499991, + "learning_rate": 1.2229384572667097e-06, + "loss": 0.4922, + "step": 13871 + }, + { + "epoch": 0.5724189155731617, + "grad_norm": 2.9441286859558993, + "learning_rate": 1.2227414332265174e-06, + "loss": 0.5498, + "step": 13872 + }, + { + "epoch": 0.5724601799125196, + "grad_norm": 3.487486478381683, + "learning_rate": 1.222544414138857e-06, + "loss": 0.5324, + "step": 13873 + }, + { + "epoch": 0.5725014442518775, + "grad_norm": 5.27855278419253, + "learning_rate": 1.2223474000072471e-06, + "loss": 0.5349, + "step": 13874 + }, + { + "epoch": 
0.5725427085912355, + "grad_norm": 3.2626779381639572, + "learning_rate": 1.2221503908352077e-06, + "loss": 0.5962, + "step": 13875 + }, + { + "epoch": 0.5725839729305934, + "grad_norm": 5.280966743814145, + "learning_rate": 1.221953386626257e-06, + "loss": 0.5389, + "step": 13876 + }, + { + "epoch": 0.5726252372699513, + "grad_norm": 2.4200517263224133, + "learning_rate": 1.2217563873839147e-06, + "loss": 0.5196, + "step": 13877 + }, + { + "epoch": 0.5726665016093092, + "grad_norm": 3.4476984610647254, + "learning_rate": 1.221559393111699e-06, + "loss": 0.5168, + "step": 13878 + }, + { + "epoch": 0.5727077659486671, + "grad_norm": 2.165616737456609, + "learning_rate": 1.2213624038131293e-06, + "loss": 0.5196, + "step": 13879 + }, + { + "epoch": 0.572749030288025, + "grad_norm": 3.6177315270111774, + "learning_rate": 1.221165419491724e-06, + "loss": 0.4672, + "step": 13880 + }, + { + "epoch": 0.5727902946273831, + "grad_norm": 2.1051278995753906, + "learning_rate": 1.2209684401510024e-06, + "loss": 0.5261, + "step": 13881 + }, + { + "epoch": 0.572831558966741, + "grad_norm": 4.37750188945771, + "learning_rate": 1.2207714657944823e-06, + "loss": 0.4959, + "step": 13882 + }, + { + "epoch": 0.5728728233060989, + "grad_norm": 2.8501097155752477, + "learning_rate": 1.220574496425682e-06, + "loss": 0.5455, + "step": 13883 + }, + { + "epoch": 0.5729140876454568, + "grad_norm": 6.947035828975864, + "learning_rate": 1.2203775320481204e-06, + "loss": 0.5257, + "step": 13884 + }, + { + "epoch": 0.5729553519848147, + "grad_norm": 3.5942902998407376, + "learning_rate": 1.2201805726653152e-06, + "loss": 0.4817, + "step": 13885 + }, + { + "epoch": 0.5729966163241726, + "grad_norm": 3.783493284695126, + "learning_rate": 1.2199836182807855e-06, + "loss": 0.5457, + "step": 13886 + }, + { + "epoch": 0.5730378806635306, + "grad_norm": 4.539569055482814, + "learning_rate": 1.2197866688980486e-06, + "loss": 0.5421, + "step": 13887 + }, + { + "epoch": 0.5730791450028885, + "grad_norm": 2.403274700661176, + "learning_rate": 1.2195897245206228e-06, + "loss": 0.5573, + "step": 13888 + }, + { + "epoch": 0.5731204093422464, + "grad_norm": 3.4440516834510944, + "learning_rate": 1.2193927851520258e-06, + "loss": 0.6025, + "step": 13889 + }, + { + "epoch": 0.5731616736816043, + "grad_norm": 3.6563342185681296, + "learning_rate": 1.2191958507957761e-06, + "loss": 0.5561, + "step": 13890 + }, + { + "epoch": 0.5732029380209623, + "grad_norm": 2.398958940735195, + "learning_rate": 1.2189989214553909e-06, + "loss": 0.5104, + "step": 13891 + }, + { + "epoch": 0.5732442023603203, + "grad_norm": 4.030359671945944, + "learning_rate": 1.2188019971343875e-06, + "loss": 0.5206, + "step": 13892 + }, + { + "epoch": 0.5732854666996782, + "grad_norm": 4.178789384116644, + "learning_rate": 1.2186050778362843e-06, + "loss": 0.5484, + "step": 13893 + }, + { + "epoch": 0.5733267310390361, + "grad_norm": 2.8032372848055154, + "learning_rate": 1.218408163564598e-06, + "loss": 0.5832, + "step": 13894 + }, + { + "epoch": 0.573367995378394, + "grad_norm": 2.8012770429296485, + "learning_rate": 1.2182112543228468e-06, + "loss": 0.5222, + "step": 13895 + }, + { + "epoch": 0.5734092597177519, + "grad_norm": 8.562992507553043, + "learning_rate": 1.2180143501145476e-06, + "loss": 0.4902, + "step": 13896 + }, + { + "epoch": 0.5734505240571098, + "grad_norm": 1.9889439182078488, + "learning_rate": 1.217817450943217e-06, + "loss": 0.5049, + "step": 13897 + }, + { + "epoch": 0.5734917883964677, + "grad_norm": 4.289800528180505, + "learning_rate": 
1.2176205568123728e-06, + "loss": 0.5491, + "step": 13898 + }, + { + "epoch": 0.5735330527358257, + "grad_norm": 2.4690717217445926, + "learning_rate": 1.2174236677255318e-06, + "loss": 0.5076, + "step": 13899 + }, + { + "epoch": 0.5735743170751836, + "grad_norm": 6.244259270670228, + "learning_rate": 1.217226783686211e-06, + "loss": 0.4689, + "step": 13900 + }, + { + "epoch": 0.5736155814145415, + "grad_norm": 2.440159622513541, + "learning_rate": 1.2170299046979272e-06, + "loss": 0.503, + "step": 13901 + }, + { + "epoch": 0.5736568457538995, + "grad_norm": 2.420948174196811, + "learning_rate": 1.2168330307641975e-06, + "loss": 0.547, + "step": 13902 + }, + { + "epoch": 0.5736981100932574, + "grad_norm": 4.589876115227085, + "learning_rate": 1.2166361618885377e-06, + "loss": 0.489, + "step": 13903 + }, + { + "epoch": 0.5737393744326154, + "grad_norm": 4.854516990738096, + "learning_rate": 1.2164392980744656e-06, + "loss": 0.5089, + "step": 13904 + }, + { + "epoch": 0.5737806387719733, + "grad_norm": 3.764545269095738, + "learning_rate": 1.2162424393254963e-06, + "loss": 0.5113, + "step": 13905 + }, + { + "epoch": 0.5738219031113312, + "grad_norm": 2.5729278155056643, + "learning_rate": 1.2160455856451473e-06, + "loss": 0.5355, + "step": 13906 + }, + { + "epoch": 0.5738631674506891, + "grad_norm": 4.549911300637195, + "learning_rate": 1.2158487370369342e-06, + "loss": 0.5133, + "step": 13907 + }, + { + "epoch": 0.573904431790047, + "grad_norm": 4.413476032117796, + "learning_rate": 1.2156518935043746e-06, + "loss": 0.5229, + "step": 13908 + }, + { + "epoch": 0.5739456961294049, + "grad_norm": 2.160379371108376, + "learning_rate": 1.2154550550509825e-06, + "loss": 0.5752, + "step": 13909 + }, + { + "epoch": 0.5739869604687629, + "grad_norm": 5.18238951253366, + "learning_rate": 1.215258221680275e-06, + "loss": 0.533, + "step": 13910 + }, + { + "epoch": 0.5740282248081208, + "grad_norm": 4.050879233944441, + "learning_rate": 1.215061393395768e-06, + "loss": 0.5727, + "step": 13911 + }, + { + "epoch": 0.5740694891474788, + "grad_norm": 8.703653585329505, + "learning_rate": 1.2148645702009774e-06, + "loss": 0.4794, + "step": 13912 + }, + { + "epoch": 0.5741107534868367, + "grad_norm": 2.7729847538854426, + "learning_rate": 1.2146677520994187e-06, + "loss": 0.5446, + "step": 13913 + }, + { + "epoch": 0.5741520178261946, + "grad_norm": 2.4697249153240444, + "learning_rate": 1.214470939094608e-06, + "loss": 0.5409, + "step": 13914 + }, + { + "epoch": 0.5741932821655525, + "grad_norm": 2.9458912499201446, + "learning_rate": 1.2142741311900603e-06, + "loss": 0.5556, + "step": 13915 + }, + { + "epoch": 0.5742345465049105, + "grad_norm": 3.528997823830037, + "learning_rate": 1.2140773283892915e-06, + "loss": 0.5398, + "step": 13916 + }, + { + "epoch": 0.5742758108442684, + "grad_norm": 2.143653405779685, + "learning_rate": 1.2138805306958168e-06, + "loss": 0.5102, + "step": 13917 + }, + { + "epoch": 0.5743170751836263, + "grad_norm": 3.4033328390567825, + "learning_rate": 1.213683738113152e-06, + "loss": 0.5475, + "step": 13918 + }, + { + "epoch": 0.5743583395229842, + "grad_norm": 2.025081992915792, + "learning_rate": 1.2134869506448115e-06, + "loss": 0.4757, + "step": 13919 + }, + { + "epoch": 0.5743996038623421, + "grad_norm": 2.0567089466608004, + "learning_rate": 1.213290168294311e-06, + "loss": 0.4459, + "step": 13920 + }, + { + "epoch": 0.5744408682017, + "grad_norm": 2.086076822977338, + "learning_rate": 1.2130933910651656e-06, + "loss": 0.5126, + "step": 13921 + }, + { + "epoch": 
0.5744821325410581, + "grad_norm": 2.8400187300788335, + "learning_rate": 1.2128966189608895e-06, + "loss": 0.4937, + "step": 13922 + }, + { + "epoch": 0.574523396880416, + "grad_norm": 2.690939922237985, + "learning_rate": 1.2126998519849984e-06, + "loss": 0.5238, + "step": 13923 + }, + { + "epoch": 0.5745646612197739, + "grad_norm": 4.641058182261227, + "learning_rate": 1.2125030901410062e-06, + "loss": 0.5045, + "step": 13924 + }, + { + "epoch": 0.5746059255591318, + "grad_norm": 11.849314797090146, + "learning_rate": 1.2123063334324283e-06, + "loss": 0.504, + "step": 13925 + }, + { + "epoch": 0.5746471898984897, + "grad_norm": 2.2096237709646975, + "learning_rate": 1.2121095818627786e-06, + "loss": 0.5205, + "step": 13926 + }, + { + "epoch": 0.5746884542378476, + "grad_norm": 3.7305315905980483, + "learning_rate": 1.2119128354355726e-06, + "loss": 0.5295, + "step": 13927 + }, + { + "epoch": 0.5747297185772056, + "grad_norm": 2.484044586072143, + "learning_rate": 1.2117160941543234e-06, + "loss": 0.5179, + "step": 13928 + }, + { + "epoch": 0.5747709829165635, + "grad_norm": 3.731317632283692, + "learning_rate": 1.2115193580225464e-06, + "loss": 0.4848, + "step": 13929 + }, + { + "epoch": 0.5748122472559214, + "grad_norm": 3.1747718358416077, + "learning_rate": 1.2113226270437552e-06, + "loss": 0.5183, + "step": 13930 + }, + { + "epoch": 0.5748535115952793, + "grad_norm": 8.492574578965304, + "learning_rate": 1.2111259012214637e-06, + "loss": 0.5356, + "step": 13931 + }, + { + "epoch": 0.5748947759346373, + "grad_norm": 2.538448626921173, + "learning_rate": 1.210929180559187e-06, + "loss": 0.5226, + "step": 13932 + }, + { + "epoch": 0.5749360402739953, + "grad_norm": 2.6092165901738937, + "learning_rate": 1.2107324650604375e-06, + "loss": 0.5122, + "step": 13933 + }, + { + "epoch": 0.5749773046133532, + "grad_norm": 40.681700731488014, + "learning_rate": 1.2105357547287306e-06, + "loss": 0.5623, + "step": 13934 + }, + { + "epoch": 0.5750185689527111, + "grad_norm": 3.012875698371477, + "learning_rate": 1.210339049567579e-06, + "loss": 0.5462, + "step": 13935 + }, + { + "epoch": 0.575059833292069, + "grad_norm": 3.211303501887158, + "learning_rate": 1.2101423495804963e-06, + "loss": 0.5078, + "step": 13936 + }, + { + "epoch": 0.5751010976314269, + "grad_norm": 9.727851396112737, + "learning_rate": 1.2099456547709966e-06, + "loss": 0.5312, + "step": 13937 + }, + { + "epoch": 0.5751423619707848, + "grad_norm": 2.110619835392334, + "learning_rate": 1.2097489651425926e-06, + "loss": 0.4951, + "step": 13938 + }, + { + "epoch": 0.5751836263101427, + "grad_norm": 2.070613874104924, + "learning_rate": 1.209552280698799e-06, + "loss": 0.5225, + "step": 13939 + }, + { + "epoch": 0.5752248906495007, + "grad_norm": 2.090551823829356, + "learning_rate": 1.2093556014431274e-06, + "loss": 0.4812, + "step": 13940 + }, + { + "epoch": 0.5752661549888586, + "grad_norm": 5.27142410584654, + "learning_rate": 1.2091589273790923e-06, + "loss": 0.4784, + "step": 13941 + }, + { + "epoch": 0.5753074193282166, + "grad_norm": 3.3996879791653987, + "learning_rate": 1.208962258510206e-06, + "loss": 0.5718, + "step": 13942 + }, + { + "epoch": 0.5753486836675745, + "grad_norm": 5.10370102547204, + "learning_rate": 1.2087655948399823e-06, + "loss": 0.5663, + "step": 13943 + }, + { + "epoch": 0.5753899480069324, + "grad_norm": 2.917143445214355, + "learning_rate": 1.2085689363719334e-06, + "loss": 0.4579, + "step": 13944 + }, + { + "epoch": 0.5754312123462904, + "grad_norm": 3.1073627048307695, + "learning_rate": 
1.2083722831095723e-06, + "loss": 0.5397, + "step": 13945 + }, + { + "epoch": 0.5754724766856483, + "grad_norm": 6.689069857538752, + "learning_rate": 1.208175635056412e-06, + "loss": 0.5498, + "step": 13946 + }, + { + "epoch": 0.5755137410250062, + "grad_norm": 2.5421307486035007, + "learning_rate": 1.2079789922159652e-06, + "loss": 0.5147, + "step": 13947 + }, + { + "epoch": 0.5755550053643641, + "grad_norm": 3.824044448657049, + "learning_rate": 1.2077823545917436e-06, + "loss": 0.5527, + "step": 13948 + }, + { + "epoch": 0.575596269703722, + "grad_norm": 1.8638042748370671, + "learning_rate": 1.2075857221872603e-06, + "loss": 0.5137, + "step": 13949 + }, + { + "epoch": 0.5756375340430799, + "grad_norm": 1.8948031667471352, + "learning_rate": 1.207389095006027e-06, + "loss": 0.529, + "step": 13950 + }, + { + "epoch": 0.5756787983824379, + "grad_norm": 3.2618230961475643, + "learning_rate": 1.2071924730515572e-06, + "loss": 0.5413, + "step": 13951 + }, + { + "epoch": 0.5757200627217959, + "grad_norm": 2.2405417078543666, + "learning_rate": 1.2069958563273618e-06, + "loss": 0.5281, + "step": 13952 + }, + { + "epoch": 0.5757613270611538, + "grad_norm": 1.8196730281333655, + "learning_rate": 1.2067992448369537e-06, + "loss": 0.4666, + "step": 13953 + }, + { + "epoch": 0.5758025914005117, + "grad_norm": 3.9856575870679585, + "learning_rate": 1.2066026385838444e-06, + "loss": 0.4847, + "step": 13954 + }, + { + "epoch": 0.5758438557398696, + "grad_norm": 3.957825952667185, + "learning_rate": 1.206406037571546e-06, + "loss": 0.4586, + "step": 13955 + }, + { + "epoch": 0.5758851200792275, + "grad_norm": 2.961851383835426, + "learning_rate": 1.2062094418035702e-06, + "loss": 0.5258, + "step": 13956 + }, + { + "epoch": 0.5759263844185855, + "grad_norm": 3.974195137820532, + "learning_rate": 1.2060128512834288e-06, + "loss": 0.5921, + "step": 13957 + }, + { + "epoch": 0.5759676487579434, + "grad_norm": 3.694402429621487, + "learning_rate": 1.2058162660146332e-06, + "loss": 0.4828, + "step": 13958 + }, + { + "epoch": 0.5760089130973013, + "grad_norm": 2.3442357431592677, + "learning_rate": 1.2056196860006952e-06, + "loss": 0.5422, + "step": 13959 + }, + { + "epoch": 0.5760501774366592, + "grad_norm": 2.4126187009084967, + "learning_rate": 1.2054231112451265e-06, + "loss": 0.5432, + "step": 13960 + }, + { + "epoch": 0.5760914417760171, + "grad_norm": 9.770498315551148, + "learning_rate": 1.205226541751437e-06, + "loss": 0.5832, + "step": 13961 + }, + { + "epoch": 0.5761327061153751, + "grad_norm": 2.2818351572337328, + "learning_rate": 1.2050299775231395e-06, + "loss": 0.5375, + "step": 13962 + }, + { + "epoch": 0.5761739704547331, + "grad_norm": 2.1641755364533073, + "learning_rate": 1.204833418563744e-06, + "loss": 0.4911, + "step": 13963 + }, + { + "epoch": 0.576215234794091, + "grad_norm": 1.9480032636356832, + "learning_rate": 1.2046368648767625e-06, + "loss": 0.4616, + "step": 13964 + }, + { + "epoch": 0.5762564991334489, + "grad_norm": 5.718428672635009, + "learning_rate": 1.2044403164657053e-06, + "loss": 0.5007, + "step": 13965 + }, + { + "epoch": 0.5762977634728068, + "grad_norm": 3.5838498048488217, + "learning_rate": 1.2042437733340835e-06, + "loss": 0.4762, + "step": 13966 + }, + { + "epoch": 0.5763390278121647, + "grad_norm": 19.013700582625177, + "learning_rate": 1.2040472354854074e-06, + "loss": 0.4938, + "step": 13967 + }, + { + "epoch": 0.5763802921515226, + "grad_norm": 2.065046098148071, + "learning_rate": 1.2038507029231883e-06, + "loss": 0.5448, + "step": 13968 + }, + { + 
"epoch": 0.5764215564908806, + "grad_norm": 2.3395274629637304, + "learning_rate": 1.2036541756509367e-06, + "loss": 0.565, + "step": 13969 + }, + { + "epoch": 0.5764628208302385, + "grad_norm": 2.5785801010348504, + "learning_rate": 1.2034576536721626e-06, + "loss": 0.4912, + "step": 13970 + }, + { + "epoch": 0.5765040851695964, + "grad_norm": 4.644928379807288, + "learning_rate": 1.2032611369903768e-06, + "loss": 0.4857, + "step": 13971 + }, + { + "epoch": 0.5765453495089543, + "grad_norm": 3.804632035602279, + "learning_rate": 1.2030646256090892e-06, + "loss": 0.5131, + "step": 13972 + }, + { + "epoch": 0.5765866138483123, + "grad_norm": 3.125665816525423, + "learning_rate": 1.2028681195318105e-06, + "loss": 0.5259, + "step": 13973 + }, + { + "epoch": 0.5766278781876703, + "grad_norm": 2.848871973733943, + "learning_rate": 1.2026716187620507e-06, + "loss": 0.5373, + "step": 13974 + }, + { + "epoch": 0.5766691425270282, + "grad_norm": 2.8284208999713467, + "learning_rate": 1.202475123303319e-06, + "loss": 0.4403, + "step": 13975 + }, + { + "epoch": 0.5767104068663861, + "grad_norm": 4.792006662676933, + "learning_rate": 1.2022786331591264e-06, + "loss": 0.5117, + "step": 13976 + }, + { + "epoch": 0.576751671205744, + "grad_norm": 5.5050206757645, + "learning_rate": 1.2020821483329817e-06, + "loss": 0.5218, + "step": 13977 + }, + { + "epoch": 0.5767929355451019, + "grad_norm": 2.017332952728173, + "learning_rate": 1.2018856688283954e-06, + "loss": 0.4977, + "step": 13978 + }, + { + "epoch": 0.5768341998844598, + "grad_norm": 8.908744791863914, + "learning_rate": 1.2016891946488766e-06, + "loss": 0.4758, + "step": 13979 + }, + { + "epoch": 0.5768754642238177, + "grad_norm": 3.009281298632659, + "learning_rate": 1.2014927257979354e-06, + "loss": 0.5405, + "step": 13980 + }, + { + "epoch": 0.5769167285631757, + "grad_norm": 2.0068517037759803, + "learning_rate": 1.2012962622790805e-06, + "loss": 0.4988, + "step": 13981 + }, + { + "epoch": 0.5769579929025336, + "grad_norm": 2.087085831590462, + "learning_rate": 1.2010998040958218e-06, + "loss": 0.5379, + "step": 13982 + }, + { + "epoch": 0.5769992572418916, + "grad_norm": 2.359464269301298, + "learning_rate": 1.200903351251668e-06, + "loss": 0.539, + "step": 13983 + }, + { + "epoch": 0.5770405215812495, + "grad_norm": 2.1038086329320915, + "learning_rate": 1.2007069037501291e-06, + "loss": 0.5389, + "step": 13984 + }, + { + "epoch": 0.5770817859206074, + "grad_norm": 3.2730087052421406, + "learning_rate": 1.2005104615947132e-06, + "loss": 0.5626, + "step": 13985 + }, + { + "epoch": 0.5771230502599654, + "grad_norm": 1.895824135421145, + "learning_rate": 1.2003140247889299e-06, + "loss": 0.5063, + "step": 13986 + }, + { + "epoch": 0.5771643145993233, + "grad_norm": 2.7410754050799184, + "learning_rate": 1.2001175933362883e-06, + "loss": 0.5569, + "step": 13987 + }, + { + "epoch": 0.5772055789386812, + "grad_norm": 3.1218750989678075, + "learning_rate": 1.1999211672402964e-06, + "loss": 0.5667, + "step": 13988 + }, + { + "epoch": 0.5772468432780391, + "grad_norm": 1.8236882977053095, + "learning_rate": 1.1997247465044627e-06, + "loss": 0.5194, + "step": 13989 + }, + { + "epoch": 0.577288107617397, + "grad_norm": 2.4191523551621366, + "learning_rate": 1.1995283311322963e-06, + "loss": 0.5208, + "step": 13990 + }, + { + "epoch": 0.5773293719567549, + "grad_norm": 6.319205627528424, + "learning_rate": 1.1993319211273056e-06, + "loss": 0.5126, + "step": 13991 + }, + { + "epoch": 0.5773706362961128, + "grad_norm": 2.5436147616668263, + 
"learning_rate": 1.1991355164929992e-06, + "loss": 0.4937, + "step": 13992 + }, + { + "epoch": 0.5774119006354709, + "grad_norm": 2.363766755558069, + "learning_rate": 1.1989391172328845e-06, + "loss": 0.5555, + "step": 13993 + }, + { + "epoch": 0.5774531649748288, + "grad_norm": 3.043793902761363, + "learning_rate": 1.198742723350471e-06, + "loss": 0.4845, + "step": 13994 + }, + { + "epoch": 0.5774944293141867, + "grad_norm": 2.6581616374292754, + "learning_rate": 1.1985463348492657e-06, + "loss": 0.5652, + "step": 13995 + }, + { + "epoch": 0.5775356936535446, + "grad_norm": 3.0987746924105606, + "learning_rate": 1.1983499517327772e-06, + "loss": 0.5199, + "step": 13996 + }, + { + "epoch": 0.5775769579929025, + "grad_norm": 2.5519222277881606, + "learning_rate": 1.1981535740045127e-06, + "loss": 0.547, + "step": 13997 + }, + { + "epoch": 0.5776182223322605, + "grad_norm": 2.1965035404029245, + "learning_rate": 1.197957201667981e-06, + "loss": 0.5725, + "step": 13998 + }, + { + "epoch": 0.5776594866716184, + "grad_norm": 5.776553049914662, + "learning_rate": 1.197760834726689e-06, + "loss": 0.5281, + "step": 13999 + }, + { + "epoch": 0.5777007510109763, + "grad_norm": 2.705432769177785, + "learning_rate": 1.1975644731841451e-06, + "loss": 0.5074, + "step": 14000 + }, + { + "epoch": 0.5777420153503342, + "grad_norm": 3.874646292407652, + "learning_rate": 1.197368117043856e-06, + "loss": 0.6176, + "step": 14001 + }, + { + "epoch": 0.5777832796896921, + "grad_norm": 10.723353399425898, + "learning_rate": 1.1971717663093293e-06, + "loss": 0.4547, + "step": 14002 + }, + { + "epoch": 0.5778245440290501, + "grad_norm": 2.302591828475013, + "learning_rate": 1.1969754209840724e-06, + "loss": 0.513, + "step": 14003 + }, + { + "epoch": 0.5778658083684081, + "grad_norm": 2.2562123348403995, + "learning_rate": 1.1967790810715922e-06, + "loss": 0.5381, + "step": 14004 + }, + { + "epoch": 0.577907072707766, + "grad_norm": 4.423071599450479, + "learning_rate": 1.1965827465753969e-06, + "loss": 0.5832, + "step": 14005 + }, + { + "epoch": 0.5779483370471239, + "grad_norm": 3.8009680157364016, + "learning_rate": 1.1963864174989918e-06, + "loss": 0.5291, + "step": 14006 + }, + { + "epoch": 0.5779896013864818, + "grad_norm": 4.216384991371064, + "learning_rate": 1.1961900938458857e-06, + "loss": 0.5024, + "step": 14007 + }, + { + "epoch": 0.5780308657258397, + "grad_norm": 3.280267450727201, + "learning_rate": 1.1959937756195843e-06, + "loss": 0.5206, + "step": 14008 + }, + { + "epoch": 0.5780721300651976, + "grad_norm": 3.5715895761993264, + "learning_rate": 1.1957974628235942e-06, + "loss": 0.495, + "step": 14009 + }, + { + "epoch": 0.5781133944045556, + "grad_norm": 2.263860188991468, + "learning_rate": 1.195601155461423e-06, + "loss": 0.5014, + "step": 14010 + }, + { + "epoch": 0.5781546587439135, + "grad_norm": 6.070748001660958, + "learning_rate": 1.195404853536576e-06, + "loss": 0.553, + "step": 14011 + }, + { + "epoch": 0.5781959230832714, + "grad_norm": 1.6691247878568725, + "learning_rate": 1.1952085570525608e-06, + "loss": 0.5034, + "step": 14012 + }, + { + "epoch": 0.5782371874226294, + "grad_norm": 3.754281955716317, + "learning_rate": 1.1950122660128836e-06, + "loss": 0.5874, + "step": 14013 + }, + { + "epoch": 0.5782784517619873, + "grad_norm": 3.495000990266122, + "learning_rate": 1.1948159804210496e-06, + "loss": 0.4836, + "step": 14014 + }, + { + "epoch": 0.5783197161013452, + "grad_norm": 3.372086455507623, + "learning_rate": 1.194619700280566e-06, + "loss": 0.5205, + "step": 14015 + }, + { 
+ "epoch": 0.5783609804407032, + "grad_norm": 3.3436417626334802, + "learning_rate": 1.194423425594938e-06, + "loss": 0.5302, + "step": 14016 + }, + { + "epoch": 0.5784022447800611, + "grad_norm": 2.8721957624979653, + "learning_rate": 1.1942271563676726e-06, + "loss": 0.5584, + "step": 14017 + }, + { + "epoch": 0.578443509119419, + "grad_norm": 7.033300834663545, + "learning_rate": 1.1940308926022747e-06, + "loss": 0.465, + "step": 14018 + }, + { + "epoch": 0.5784847734587769, + "grad_norm": 2.880290718775564, + "learning_rate": 1.1938346343022508e-06, + "loss": 0.5228, + "step": 14019 + }, + { + "epoch": 0.5785260377981348, + "grad_norm": 2.4802302948164305, + "learning_rate": 1.1936383814711058e-06, + "loss": 0.499, + "step": 14020 + }, + { + "epoch": 0.5785673021374927, + "grad_norm": 2.231660534098801, + "learning_rate": 1.1934421341123463e-06, + "loss": 0.4913, + "step": 14021 + }, + { + "epoch": 0.5786085664768507, + "grad_norm": 2.8527149384408648, + "learning_rate": 1.1932458922294766e-06, + "loss": 0.4928, + "step": 14022 + }, + { + "epoch": 0.5786498308162087, + "grad_norm": 2.8285112550595013, + "learning_rate": 1.193049655826003e-06, + "loss": 0.5527, + "step": 14023 + }, + { + "epoch": 0.5786910951555666, + "grad_norm": 3.7180351046472815, + "learning_rate": 1.1928534249054302e-06, + "loss": 0.5066, + "step": 14024 + }, + { + "epoch": 0.5787323594949245, + "grad_norm": 2.6542705730110403, + "learning_rate": 1.1926571994712638e-06, + "loss": 0.5046, + "step": 14025 + }, + { + "epoch": 0.5787736238342824, + "grad_norm": 15.225413451447297, + "learning_rate": 1.1924609795270091e-06, + "loss": 0.5546, + "step": 14026 + }, + { + "epoch": 0.5788148881736404, + "grad_norm": 1.953758995428561, + "learning_rate": 1.1922647650761703e-06, + "loss": 0.5557, + "step": 14027 + }, + { + "epoch": 0.5788561525129983, + "grad_norm": 2.426735964399641, + "learning_rate": 1.1920685561222523e-06, + "loss": 0.5347, + "step": 14028 + }, + { + "epoch": 0.5788974168523562, + "grad_norm": 3.042039310476383, + "learning_rate": 1.1918723526687605e-06, + "loss": 0.5156, + "step": 14029 + }, + { + "epoch": 0.5789386811917141, + "grad_norm": 10.377089060136246, + "learning_rate": 1.191676154719199e-06, + "loss": 0.497, + "step": 14030 + }, + { + "epoch": 0.578979945531072, + "grad_norm": 2.6790149641643373, + "learning_rate": 1.191479962277073e-06, + "loss": 0.5719, + "step": 14031 + }, + { + "epoch": 0.5790212098704299, + "grad_norm": 3.8705028792216183, + "learning_rate": 1.1912837753458866e-06, + "loss": 0.504, + "step": 14032 + }, + { + "epoch": 0.5790624742097878, + "grad_norm": 5.219372829781381, + "learning_rate": 1.1910875939291442e-06, + "loss": 0.4983, + "step": 14033 + }, + { + "epoch": 0.5791037385491459, + "grad_norm": 2.5211146393569117, + "learning_rate": 1.19089141803035e-06, + "loss": 0.5357, + "step": 14034 + }, + { + "epoch": 0.5791450028885038, + "grad_norm": 4.827655548260979, + "learning_rate": 1.1906952476530087e-06, + "loss": 0.5588, + "step": 14035 + }, + { + "epoch": 0.5791862672278617, + "grad_norm": 3.055843297241893, + "learning_rate": 1.1904990828006237e-06, + "loss": 0.4945, + "step": 14036 + }, + { + "epoch": 0.5792275315672196, + "grad_norm": 1.7423034350545379, + "learning_rate": 1.1903029234766995e-06, + "loss": 0.5651, + "step": 14037 + }, + { + "epoch": 0.5792687959065775, + "grad_norm": 3.8517495677417903, + "learning_rate": 1.1901067696847397e-06, + "loss": 0.5322, + "step": 14038 + }, + { + "epoch": 0.5793100602459355, + "grad_norm": 3.7919159231113233, + 
"learning_rate": 1.1899106214282488e-06, + "loss": 0.4692, + "step": 14039 + }, + { + "epoch": 0.5793513245852934, + "grad_norm": 6.8337925083430475, + "learning_rate": 1.1897144787107295e-06, + "loss": 0.5151, + "step": 14040 + }, + { + "epoch": 0.5793925889246513, + "grad_norm": 3.6632992695266817, + "learning_rate": 1.1895183415356858e-06, + "loss": 0.5098, + "step": 14041 + }, + { + "epoch": 0.5794338532640092, + "grad_norm": 2.542317958896498, + "learning_rate": 1.1893222099066215e-06, + "loss": 0.5552, + "step": 14042 + }, + { + "epoch": 0.5794751176033671, + "grad_norm": 3.2876054786049322, + "learning_rate": 1.189126083827039e-06, + "loss": 0.5137, + "step": 14043 + }, + { + "epoch": 0.5795163819427251, + "grad_norm": 6.065575095443958, + "learning_rate": 1.1889299633004431e-06, + "loss": 0.5363, + "step": 14044 + }, + { + "epoch": 0.5795576462820831, + "grad_norm": 4.093236920077581, + "learning_rate": 1.1887338483303356e-06, + "loss": 0.5486, + "step": 14045 + }, + { + "epoch": 0.579598910621441, + "grad_norm": 2.738769886430974, + "learning_rate": 1.1885377389202206e-06, + "loss": 0.4957, + "step": 14046 + }, + { + "epoch": 0.5796401749607989, + "grad_norm": 2.117963176090627, + "learning_rate": 1.188341635073601e-06, + "loss": 0.514, + "step": 14047 + }, + { + "epoch": 0.5796814393001568, + "grad_norm": 3.036623729629254, + "learning_rate": 1.188145536793979e-06, + "loss": 0.5147, + "step": 14048 + }, + { + "epoch": 0.5797227036395147, + "grad_norm": 3.5073563474398974, + "learning_rate": 1.1879494440848582e-06, + "loss": 0.5239, + "step": 14049 + }, + { + "epoch": 0.5797639679788726, + "grad_norm": 2.284871600073553, + "learning_rate": 1.1877533569497404e-06, + "loss": 0.5147, + "step": 14050 + }, + { + "epoch": 0.5798052323182306, + "grad_norm": 2.159800783757635, + "learning_rate": 1.1875572753921294e-06, + "loss": 0.5051, + "step": 14051 + }, + { + "epoch": 0.5798464966575885, + "grad_norm": 3.6447825174567265, + "learning_rate": 1.1873611994155264e-06, + "loss": 0.5597, + "step": 14052 + }, + { + "epoch": 0.5798877609969464, + "grad_norm": 2.4317122492626666, + "learning_rate": 1.1871651290234356e-06, + "loss": 0.515, + "step": 14053 + }, + { + "epoch": 0.5799290253363044, + "grad_norm": 2.478516585137256, + "learning_rate": 1.1869690642193574e-06, + "loss": 0.5026, + "step": 14054 + }, + { + "epoch": 0.5799702896756623, + "grad_norm": 4.082966302231318, + "learning_rate": 1.1867730050067947e-06, + "loss": 0.4936, + "step": 14055 + }, + { + "epoch": 0.5800115540150202, + "grad_norm": 3.6818332582800437, + "learning_rate": 1.18657695138925e-06, + "loss": 0.5432, + "step": 14056 + }, + { + "epoch": 0.5800528183543782, + "grad_norm": 5.7792772589912245, + "learning_rate": 1.1863809033702248e-06, + "loss": 0.5229, + "step": 14057 + }, + { + "epoch": 0.5800940826937361, + "grad_norm": 2.071521457606696, + "learning_rate": 1.1861848609532213e-06, + "loss": 0.5638, + "step": 14058 + }, + { + "epoch": 0.580135347033094, + "grad_norm": 4.115984492101988, + "learning_rate": 1.185988824141741e-06, + "loss": 0.5308, + "step": 14059 + }, + { + "epoch": 0.5801766113724519, + "grad_norm": 3.2481374550205313, + "learning_rate": 1.1857927929392865e-06, + "loss": 0.5814, + "step": 14060 + }, + { + "epoch": 0.5802178757118098, + "grad_norm": 2.829734621571344, + "learning_rate": 1.185596767349358e-06, + "loss": 0.4832, + "step": 14061 + }, + { + "epoch": 0.5802591400511677, + "grad_norm": 2.9011380040394723, + "learning_rate": 1.1854007473754581e-06, + "loss": 0.5899, + "step": 14062 + }, + 
{ + "epoch": 0.5803004043905257, + "grad_norm": 3.56819927033283, + "learning_rate": 1.1852047330210877e-06, + "loss": 0.5652, + "step": 14063 + }, + { + "epoch": 0.5803416687298837, + "grad_norm": 2.0184263503060187, + "learning_rate": 1.1850087242897486e-06, + "loss": 0.5005, + "step": 14064 + }, + { + "epoch": 0.5803829330692416, + "grad_norm": 16.807933533020854, + "learning_rate": 1.1848127211849411e-06, + "loss": 0.4672, + "step": 14065 + }, + { + "epoch": 0.5804241974085995, + "grad_norm": 3.54581295208519, + "learning_rate": 1.1846167237101681e-06, + "loss": 0.5022, + "step": 14066 + }, + { + "epoch": 0.5804654617479574, + "grad_norm": 2.576990130980627, + "learning_rate": 1.1844207318689285e-06, + "loss": 0.519, + "step": 14067 + }, + { + "epoch": 0.5805067260873154, + "grad_norm": 2.2328491995313073, + "learning_rate": 1.1842247456647243e-06, + "loss": 0.5303, + "step": 14068 + }, + { + "epoch": 0.5805479904266733, + "grad_norm": 2.1460807214871673, + "learning_rate": 1.1840287651010555e-06, + "loss": 0.5106, + "step": 14069 + }, + { + "epoch": 0.5805892547660312, + "grad_norm": 3.850776406516795, + "learning_rate": 1.183832790181424e-06, + "loss": 0.5446, + "step": 14070 + }, + { + "epoch": 0.5806305191053891, + "grad_norm": 3.9754748646222975, + "learning_rate": 1.1836368209093294e-06, + "loss": 0.5401, + "step": 14071 + }, + { + "epoch": 0.580671783444747, + "grad_norm": 3.7084903349998717, + "learning_rate": 1.1834408572882729e-06, + "loss": 0.5272, + "step": 14072 + }, + { + "epoch": 0.5807130477841049, + "grad_norm": 4.834382028618128, + "learning_rate": 1.1832448993217542e-06, + "loss": 0.491, + "step": 14073 + }, + { + "epoch": 0.580754312123463, + "grad_norm": 2.519575801248645, + "learning_rate": 1.1830489470132744e-06, + "loss": 0.5724, + "step": 14074 + }, + { + "epoch": 0.5807955764628209, + "grad_norm": 2.511368994799634, + "learning_rate": 1.182853000366333e-06, + "loss": 0.5611, + "step": 14075 + }, + { + "epoch": 0.5808368408021788, + "grad_norm": 2.941945235182344, + "learning_rate": 1.1826570593844309e-06, + "loss": 0.5982, + "step": 14076 + }, + { + "epoch": 0.5808781051415367, + "grad_norm": 2.8740765078464525, + "learning_rate": 1.182461124071067e-06, + "loss": 0.5214, + "step": 14077 + }, + { + "epoch": 0.5809193694808946, + "grad_norm": 1.95919833887203, + "learning_rate": 1.1822651944297422e-06, + "loss": 0.533, + "step": 14078 + }, + { + "epoch": 0.5809606338202525, + "grad_norm": 2.881850193296953, + "learning_rate": 1.1820692704639562e-06, + "loss": 0.4898, + "step": 14079 + }, + { + "epoch": 0.5810018981596105, + "grad_norm": 6.691397240512783, + "learning_rate": 1.1818733521772077e-06, + "loss": 0.5568, + "step": 14080 + }, + { + "epoch": 0.5810431624989684, + "grad_norm": 3.3379526318223687, + "learning_rate": 1.1816774395729973e-06, + "loss": 0.5803, + "step": 14081 + }, + { + "epoch": 0.5810844268383263, + "grad_norm": 9.098855123728164, + "learning_rate": 1.1814815326548238e-06, + "loss": 0.474, + "step": 14082 + }, + { + "epoch": 0.5811256911776842, + "grad_norm": 2.4469512309946886, + "learning_rate": 1.1812856314261873e-06, + "loss": 0.5299, + "step": 14083 + }, + { + "epoch": 0.5811669555170422, + "grad_norm": 3.0365829381404525, + "learning_rate": 1.1810897358905865e-06, + "loss": 0.5456, + "step": 14084 + }, + { + "epoch": 0.5812082198564001, + "grad_norm": 3.842194292373662, + "learning_rate": 1.180893846051521e-06, + "loss": 0.4705, + "step": 14085 + }, + { + "epoch": 0.5812494841957581, + "grad_norm": 1.8414849881744098, + 
"learning_rate": 1.1806979619124896e-06, + "loss": 0.5058, + "step": 14086 + }, + { + "epoch": 0.581290748535116, + "grad_norm": 2.8081361882777323, + "learning_rate": 1.1805020834769911e-06, + "loss": 0.5372, + "step": 14087 + }, + { + "epoch": 0.5813320128744739, + "grad_norm": 2.2482210180445805, + "learning_rate": 1.1803062107485252e-06, + "loss": 0.525, + "step": 14088 + }, + { + "epoch": 0.5813732772138318, + "grad_norm": 4.462630404440024, + "learning_rate": 1.1801103437305895e-06, + "loss": 0.5446, + "step": 14089 + }, + { + "epoch": 0.5814145415531897, + "grad_norm": 4.173122655532156, + "learning_rate": 1.1799144824266837e-06, + "loss": 0.562, + "step": 14090 + }, + { + "epoch": 0.5814558058925476, + "grad_norm": 2.388003087657652, + "learning_rate": 1.179718626840306e-06, + "loss": 0.5185, + "step": 14091 + }, + { + "epoch": 0.5814970702319056, + "grad_norm": 4.282263308021364, + "learning_rate": 1.179522776974955e-06, + "loss": 0.4914, + "step": 14092 + }, + { + "epoch": 0.5815383345712635, + "grad_norm": 5.958290441942285, + "learning_rate": 1.1793269328341288e-06, + "loss": 0.518, + "step": 14093 + }, + { + "epoch": 0.5815795989106214, + "grad_norm": 2.8833688709794307, + "learning_rate": 1.1791310944213254e-06, + "loss": 0.5523, + "step": 14094 + }, + { + "epoch": 0.5816208632499794, + "grad_norm": 2.1832967994494927, + "learning_rate": 1.1789352617400437e-06, + "loss": 0.5291, + "step": 14095 + }, + { + "epoch": 0.5816621275893373, + "grad_norm": 1.7072065522489794, + "learning_rate": 1.1787394347937812e-06, + "loss": 0.5355, + "step": 14096 + }, + { + "epoch": 0.5817033919286952, + "grad_norm": 3.5617717004376677, + "learning_rate": 1.1785436135860362e-06, + "loss": 0.5387, + "step": 14097 + }, + { + "epoch": 0.5817446562680532, + "grad_norm": 5.250740183431118, + "learning_rate": 1.1783477981203062e-06, + "loss": 0.5074, + "step": 14098 + }, + { + "epoch": 0.5817859206074111, + "grad_norm": 5.807538387519841, + "learning_rate": 1.1781519884000894e-06, + "loss": 0.5106, + "step": 14099 + }, + { + "epoch": 0.581827184946769, + "grad_norm": 3.251505004581372, + "learning_rate": 1.1779561844288831e-06, + "loss": 0.5942, + "step": 14100 + }, + { + "epoch": 0.5818684492861269, + "grad_norm": 2.5786420928534146, + "learning_rate": 1.1777603862101851e-06, + "loss": 0.5414, + "step": 14101 + }, + { + "epoch": 0.5819097136254848, + "grad_norm": 2.723747856968614, + "learning_rate": 1.1775645937474925e-06, + "loss": 0.5156, + "step": 14102 + }, + { + "epoch": 0.5819509779648427, + "grad_norm": 2.7330000002451773, + "learning_rate": 1.1773688070443034e-06, + "loss": 0.4275, + "step": 14103 + }, + { + "epoch": 0.5819922423042007, + "grad_norm": 2.8931693777921876, + "learning_rate": 1.1771730261041138e-06, + "loss": 0.5332, + "step": 14104 + }, + { + "epoch": 0.5820335066435587, + "grad_norm": 3.1883835903744946, + "learning_rate": 1.1769772509304227e-06, + "loss": 0.5174, + "step": 14105 + }, + { + "epoch": 0.5820747709829166, + "grad_norm": 2.574901107392924, + "learning_rate": 1.176781481526725e-06, + "loss": 0.5502, + "step": 14106 + }, + { + "epoch": 0.5821160353222745, + "grad_norm": 10.926550264876482, + "learning_rate": 1.176585717896519e-06, + "loss": 0.5502, + "step": 14107 + }, + { + "epoch": 0.5821572996616324, + "grad_norm": 1.7498234252776743, + "learning_rate": 1.1763899600433006e-06, + "loss": 0.4997, + "step": 14108 + }, + { + "epoch": 0.5821985640009903, + "grad_norm": 3.66310831790326, + "learning_rate": 1.1761942079705677e-06, + "loss": 0.5115, + "step": 14109 + }, 
+ { + "epoch": 0.5822398283403483, + "grad_norm": 2.680123686447109, + "learning_rate": 1.1759984616818156e-06, + "loss": 0.5313, + "step": 14110 + }, + { + "epoch": 0.5822810926797062, + "grad_norm": 3.871416840446229, + "learning_rate": 1.175802721180542e-06, + "loss": 0.5498, + "step": 14111 + }, + { + "epoch": 0.5823223570190641, + "grad_norm": 2.027270494558053, + "learning_rate": 1.1756069864702424e-06, + "loss": 0.5316, + "step": 14112 + }, + { + "epoch": 0.582363621358422, + "grad_norm": 3.19969747575023, + "learning_rate": 1.175411257554414e-06, + "loss": 0.4636, + "step": 14113 + }, + { + "epoch": 0.5824048856977799, + "grad_norm": 3.0638698635242685, + "learning_rate": 1.1752155344365523e-06, + "loss": 0.4972, + "step": 14114 + }, + { + "epoch": 0.582446150037138, + "grad_norm": 2.991684176626525, + "learning_rate": 1.1750198171201537e-06, + "loss": 0.5304, + "step": 14115 + }, + { + "epoch": 0.5824874143764959, + "grad_norm": 2.3184467368660586, + "learning_rate": 1.174824105608714e-06, + "loss": 0.5458, + "step": 14116 + }, + { + "epoch": 0.5825286787158538, + "grad_norm": 5.605217972867747, + "learning_rate": 1.1746283999057295e-06, + "loss": 0.4887, + "step": 14117 + }, + { + "epoch": 0.5825699430552117, + "grad_norm": 3.782743405879182, + "learning_rate": 1.1744327000146962e-06, + "loss": 0.5277, + "step": 14118 + }, + { + "epoch": 0.5826112073945696, + "grad_norm": 2.6276431137611054, + "learning_rate": 1.1742370059391084e-06, + "loss": 0.5551, + "step": 14119 + }, + { + "epoch": 0.5826524717339275, + "grad_norm": 2.813440521601128, + "learning_rate": 1.174041317682463e-06, + "loss": 0.5515, + "step": 14120 + }, + { + "epoch": 0.5826937360732855, + "grad_norm": 8.007357297853636, + "learning_rate": 1.173845635248255e-06, + "loss": 0.4907, + "step": 14121 + }, + { + "epoch": 0.5827350004126434, + "grad_norm": 7.692035932163758, + "learning_rate": 1.17364995863998e-06, + "loss": 0.5491, + "step": 14122 + }, + { + "epoch": 0.5827762647520013, + "grad_norm": 3.08628011232997, + "learning_rate": 1.1734542878611328e-06, + "loss": 0.4848, + "step": 14123 + }, + { + "epoch": 0.5828175290913592, + "grad_norm": 3.3241931544195324, + "learning_rate": 1.1732586229152092e-06, + "loss": 0.5076, + "step": 14124 + }, + { + "epoch": 0.5828587934307172, + "grad_norm": 2.4147329876954324, + "learning_rate": 1.1730629638057041e-06, + "loss": 0.5006, + "step": 14125 + }, + { + "epoch": 0.5829000577700751, + "grad_norm": 2.783344698629587, + "learning_rate": 1.172867310536112e-06, + "loss": 0.5469, + "step": 14126 + }, + { + "epoch": 0.5829413221094331, + "grad_norm": 11.505152362069335, + "learning_rate": 1.1726716631099284e-06, + "loss": 0.5263, + "step": 14127 + }, + { + "epoch": 0.582982586448791, + "grad_norm": 1.8899764846258142, + "learning_rate": 1.1724760215306474e-06, + "loss": 0.5171, + "step": 14128 + }, + { + "epoch": 0.5830238507881489, + "grad_norm": 2.312280924391912, + "learning_rate": 1.1722803858017642e-06, + "loss": 0.5615, + "step": 14129 + }, + { + "epoch": 0.5830651151275068, + "grad_norm": 1.8726518606820877, + "learning_rate": 1.172084755926773e-06, + "loss": 0.5149, + "step": 14130 + }, + { + "epoch": 0.5831063794668647, + "grad_norm": 4.4960692777099736, + "learning_rate": 1.171889131909169e-06, + "loss": 0.5349, + "step": 14131 + }, + { + "epoch": 0.5831476438062226, + "grad_norm": 3.689397355733424, + "learning_rate": 1.1716935137524454e-06, + "loss": 0.5427, + "step": 14132 + }, + { + "epoch": 0.5831889081455806, + "grad_norm": 1.9767543592786259, + 
"learning_rate": 1.1714979014600971e-06, + "loss": 0.4656, + "step": 14133 + }, + { + "epoch": 0.5832301724849385, + "grad_norm": 3.921250005109651, + "learning_rate": 1.171302295035618e-06, + "loss": 0.5588, + "step": 14134 + }, + { + "epoch": 0.5832714368242965, + "grad_norm": 4.082595723955694, + "learning_rate": 1.171106694482502e-06, + "loss": 0.5969, + "step": 14135 + }, + { + "epoch": 0.5833127011636544, + "grad_norm": 2.490360548088533, + "learning_rate": 1.1709110998042434e-06, + "loss": 0.5283, + "step": 14136 + }, + { + "epoch": 0.5833539655030123, + "grad_norm": 2.582008519795734, + "learning_rate": 1.1707155110043354e-06, + "loss": 0.4697, + "step": 14137 + }, + { + "epoch": 0.5833952298423702, + "grad_norm": 4.582422634452656, + "learning_rate": 1.1705199280862728e-06, + "loss": 0.5369, + "step": 14138 + }, + { + "epoch": 0.5834364941817282, + "grad_norm": 1.990502549771401, + "learning_rate": 1.1703243510535477e-06, + "loss": 0.4821, + "step": 14139 + }, + { + "epoch": 0.5834777585210861, + "grad_norm": 3.9909508680962626, + "learning_rate": 1.170128779909655e-06, + "loss": 0.5092, + "step": 14140 + }, + { + "epoch": 0.583519022860444, + "grad_norm": 3.9779217765113373, + "learning_rate": 1.1699332146580874e-06, + "loss": 0.4817, + "step": 14141 + }, + { + "epoch": 0.5835602871998019, + "grad_norm": 2.5059170582380945, + "learning_rate": 1.1697376553023384e-06, + "loss": 0.4508, + "step": 14142 + }, + { + "epoch": 0.5836015515391598, + "grad_norm": 3.156523179220854, + "learning_rate": 1.1695421018459008e-06, + "loss": 0.5151, + "step": 14143 + }, + { + "epoch": 0.5836428158785177, + "grad_norm": 2.815038273518426, + "learning_rate": 1.169346554292268e-06, + "loss": 0.549, + "step": 14144 + }, + { + "epoch": 0.5836840802178758, + "grad_norm": 2.4962304648702616, + "learning_rate": 1.1691510126449333e-06, + "loss": 0.5325, + "step": 14145 + }, + { + "epoch": 0.5837253445572337, + "grad_norm": 2.635308310888104, + "learning_rate": 1.1689554769073891e-06, + "loss": 0.5797, + "step": 14146 + }, + { + "epoch": 0.5837666088965916, + "grad_norm": 6.49724861241145, + "learning_rate": 1.1687599470831278e-06, + "loss": 0.4865, + "step": 14147 + }, + { + "epoch": 0.5838078732359495, + "grad_norm": 8.596792143189171, + "learning_rate": 1.1685644231756428e-06, + "loss": 0.4789, + "step": 14148 + }, + { + "epoch": 0.5838491375753074, + "grad_norm": 2.2202661779639086, + "learning_rate": 1.168368905188426e-06, + "loss": 0.5554, + "step": 14149 + }, + { + "epoch": 0.5838904019146653, + "grad_norm": 2.7028058046175754, + "learning_rate": 1.1681733931249706e-06, + "loss": 0.6306, + "step": 14150 + }, + { + "epoch": 0.5839316662540233, + "grad_norm": 3.3435537254948136, + "learning_rate": 1.167977886988768e-06, + "loss": 0.5541, + "step": 14151 + }, + { + "epoch": 0.5839729305933812, + "grad_norm": 2.080724717638785, + "learning_rate": 1.1677823867833114e-06, + "loss": 0.4472, + "step": 14152 + }, + { + "epoch": 0.5840141949327391, + "grad_norm": 2.477201080040837, + "learning_rate": 1.167586892512092e-06, + "loss": 0.4525, + "step": 14153 + }, + { + "epoch": 0.584055459272097, + "grad_norm": 1.8593213348458983, + "learning_rate": 1.1673914041786026e-06, + "loss": 0.5081, + "step": 14154 + }, + { + "epoch": 0.5840967236114549, + "grad_norm": 2.0765566436674905, + "learning_rate": 1.1671959217863345e-06, + "loss": 0.5383, + "step": 14155 + }, + { + "epoch": 0.584137987950813, + "grad_norm": 5.517788928957888, + "learning_rate": 1.1670004453387802e-06, + "loss": 0.476, + "step": 14156 + }, + { + 
"epoch": 0.5841792522901709, + "grad_norm": 2.8576420734527543, + "learning_rate": 1.1668049748394304e-06, + "loss": 0.5024, + "step": 14157 + }, + { + "epoch": 0.5842205166295288, + "grad_norm": 2.4142686203614208, + "learning_rate": 1.1666095102917782e-06, + "loss": 0.5172, + "step": 14158 + }, + { + "epoch": 0.5842617809688867, + "grad_norm": 2.8321463248436505, + "learning_rate": 1.1664140516993138e-06, + "loss": 0.5644, + "step": 14159 + }, + { + "epoch": 0.5843030453082446, + "grad_norm": 2.189472201272562, + "learning_rate": 1.1662185990655286e-06, + "loss": 0.5446, + "step": 14160 + }, + { + "epoch": 0.5843443096476025, + "grad_norm": 2.5656943093377986, + "learning_rate": 1.1660231523939144e-06, + "loss": 0.4547, + "step": 14161 + }, + { + "epoch": 0.5843855739869604, + "grad_norm": 1.9453117081711009, + "learning_rate": 1.1658277116879618e-06, + "loss": 0.4572, + "step": 14162 + }, + { + "epoch": 0.5844268383263184, + "grad_norm": 2.330205020031109, + "learning_rate": 1.1656322769511628e-06, + "loss": 0.5223, + "step": 14163 + }, + { + "epoch": 0.5844681026656763, + "grad_norm": 2.645900212581559, + "learning_rate": 1.1654368481870077e-06, + "loss": 0.532, + "step": 14164 + }, + { + "epoch": 0.5845093670050342, + "grad_norm": 3.6455707357321905, + "learning_rate": 1.1652414253989871e-06, + "loss": 0.5826, + "step": 14165 + }, + { + "epoch": 0.5845506313443922, + "grad_norm": 3.6068582864762324, + "learning_rate": 1.1650460085905925e-06, + "loss": 0.4915, + "step": 14166 + }, + { + "epoch": 0.5845918956837501, + "grad_norm": 4.456578926881068, + "learning_rate": 1.1648505977653136e-06, + "loss": 0.542, + "step": 14167 + }, + { + "epoch": 0.5846331600231081, + "grad_norm": 2.27855084532469, + "learning_rate": 1.1646551929266421e-06, + "loss": 0.489, + "step": 14168 + }, + { + "epoch": 0.584674424362466, + "grad_norm": 4.298962528489512, + "learning_rate": 1.164459794078067e-06, + "loss": 0.5475, + "step": 14169 + }, + { + "epoch": 0.5847156887018239, + "grad_norm": 3.1203540793830045, + "learning_rate": 1.16426440122308e-06, + "loss": 0.49, + "step": 14170 + }, + { + "epoch": 0.5847569530411818, + "grad_norm": 2.95970358212254, + "learning_rate": 1.164069014365171e-06, + "loss": 0.4802, + "step": 14171 + }, + { + "epoch": 0.5847982173805397, + "grad_norm": 3.157747997452903, + "learning_rate": 1.163873633507829e-06, + "loss": 0.5215, + "step": 14172 + }, + { + "epoch": 0.5848394817198976, + "grad_norm": 2.1717742799538167, + "learning_rate": 1.1636782586545452e-06, + "loss": 0.5364, + "step": 14173 + }, + { + "epoch": 0.5848807460592556, + "grad_norm": 2.8987869967910807, + "learning_rate": 1.1634828898088085e-06, + "loss": 0.6105, + "step": 14174 + }, + { + "epoch": 0.5849220103986135, + "grad_norm": 8.599109133556356, + "learning_rate": 1.1632875269741097e-06, + "loss": 0.5409, + "step": 14175 + }, + { + "epoch": 0.5849632747379715, + "grad_norm": 2.4216881148868654, + "learning_rate": 1.1630921701539375e-06, + "loss": 0.54, + "step": 14176 + }, + { + "epoch": 0.5850045390773294, + "grad_norm": 2.5932992026193387, + "learning_rate": 1.1628968193517826e-06, + "loss": 0.502, + "step": 14177 + }, + { + "epoch": 0.5850458034166873, + "grad_norm": 2.4611041119374337, + "learning_rate": 1.1627014745711333e-06, + "loss": 0.4813, + "step": 14178 + }, + { + "epoch": 0.5850870677560452, + "grad_norm": 6.990615130802072, + "learning_rate": 1.16250613581548e-06, + "loss": 0.5354, + "step": 14179 + }, + { + "epoch": 0.5851283320954032, + "grad_norm": 2.998340594599199, + "learning_rate": 
1.1623108030883106e-06, + "loss": 0.5455, + "step": 14180 + }, + { + "epoch": 0.5851695964347611, + "grad_norm": 2.4803057180368056, + "learning_rate": 1.1621154763931157e-06, + "loss": 0.5841, + "step": 14181 + }, + { + "epoch": 0.585210860774119, + "grad_norm": 2.876384513475458, + "learning_rate": 1.1619201557333834e-06, + "loss": 0.5612, + "step": 14182 + }, + { + "epoch": 0.5852521251134769, + "grad_norm": 1.9910305243960298, + "learning_rate": 1.161724841112603e-06, + "loss": 0.5295, + "step": 14183 + }, + { + "epoch": 0.5852933894528348, + "grad_norm": 1.9973775646676364, + "learning_rate": 1.1615295325342635e-06, + "loss": 0.4977, + "step": 14184 + }, + { + "epoch": 0.5853346537921927, + "grad_norm": 2.252956823851874, + "learning_rate": 1.161334230001853e-06, + "loss": 0.4939, + "step": 14185 + }, + { + "epoch": 0.5853759181315508, + "grad_norm": 2.57686590527086, + "learning_rate": 1.1611389335188599e-06, + "loss": 0.5517, + "step": 14186 + }, + { + "epoch": 0.5854171824709087, + "grad_norm": 4.52985539337234, + "learning_rate": 1.1609436430887737e-06, + "loss": 0.5174, + "step": 14187 + }, + { + "epoch": 0.5854584468102666, + "grad_norm": 2.0122311681470233, + "learning_rate": 1.1607483587150818e-06, + "loss": 0.566, + "step": 14188 + }, + { + "epoch": 0.5854997111496245, + "grad_norm": 2.3510298937418614, + "learning_rate": 1.1605530804012732e-06, + "loss": 0.4165, + "step": 14189 + }, + { + "epoch": 0.5855409754889824, + "grad_norm": 3.6981475140324114, + "learning_rate": 1.1603578081508354e-06, + "loss": 0.5047, + "step": 14190 + }, + { + "epoch": 0.5855822398283403, + "grad_norm": 4.453257429614129, + "learning_rate": 1.1601625419672573e-06, + "loss": 0.4803, + "step": 14191 + }, + { + "epoch": 0.5856235041676983, + "grad_norm": 2.289651272128778, + "learning_rate": 1.1599672818540257e-06, + "loss": 0.6006, + "step": 14192 + }, + { + "epoch": 0.5856647685070562, + "grad_norm": 2.8518511954285213, + "learning_rate": 1.1597720278146297e-06, + "loss": 0.5179, + "step": 14193 + }, + { + "epoch": 0.5857060328464141, + "grad_norm": 3.3573244088358054, + "learning_rate": 1.1595767798525558e-06, + "loss": 0.527, + "step": 14194 + }, + { + "epoch": 0.585747297185772, + "grad_norm": 2.104854243416464, + "learning_rate": 1.1593815379712928e-06, + "loss": 0.4812, + "step": 14195 + }, + { + "epoch": 0.58578856152513, + "grad_norm": 2.563167671630824, + "learning_rate": 1.1591863021743271e-06, + "loss": 0.5533, + "step": 14196 + }, + { + "epoch": 0.585829825864488, + "grad_norm": 4.115968562952952, + "learning_rate": 1.1589910724651476e-06, + "loss": 0.5377, + "step": 14197 + }, + { + "epoch": 0.5858710902038459, + "grad_norm": 4.2742632957033395, + "learning_rate": 1.15879584884724e-06, + "loss": 0.5473, + "step": 14198 + }, + { + "epoch": 0.5859123545432038, + "grad_norm": 2.6436112902369238, + "learning_rate": 1.1586006313240918e-06, + "loss": 0.4997, + "step": 14199 + }, + { + "epoch": 0.5859536188825617, + "grad_norm": 3.93645403420978, + "learning_rate": 1.1584054198991908e-06, + "loss": 0.4972, + "step": 14200 + }, + { + "epoch": 0.5859948832219196, + "grad_norm": 2.529603219611822, + "learning_rate": 1.1582102145760232e-06, + "loss": 0.5418, + "step": 14201 + }, + { + "epoch": 0.5860361475612775, + "grad_norm": 2.083305389762608, + "learning_rate": 1.1580150153580765e-06, + "loss": 0.5359, + "step": 14202 + }, + { + "epoch": 0.5860774119006354, + "grad_norm": 2.439942359519408, + "learning_rate": 1.1578198222488372e-06, + "loss": 0.5587, + "step": 14203 + }, + { + "epoch": 
0.5861186762399934, + "grad_norm": 10.196039547388791, + "learning_rate": 1.1576246352517914e-06, + "loss": 0.4934, + "step": 14204 + }, + { + "epoch": 0.5861599405793513, + "grad_norm": 2.37412507484131, + "learning_rate": 1.1574294543704264e-06, + "loss": 0.5773, + "step": 14205 + }, + { + "epoch": 0.5862012049187093, + "grad_norm": 2.9935019984128073, + "learning_rate": 1.1572342796082284e-06, + "loss": 0.5085, + "step": 14206 + }, + { + "epoch": 0.5862424692580672, + "grad_norm": 2.4515258633379444, + "learning_rate": 1.1570391109686835e-06, + "loss": 0.5383, + "step": 14207 + }, + { + "epoch": 0.5862837335974251, + "grad_norm": 3.414269017804069, + "learning_rate": 1.156843948455278e-06, + "loss": 0.5255, + "step": 14208 + }, + { + "epoch": 0.586324997936783, + "grad_norm": 2.5047966302462297, + "learning_rate": 1.156648792071498e-06, + "loss": 0.5166, + "step": 14209 + }, + { + "epoch": 0.586366262276141, + "grad_norm": 2.438023762209403, + "learning_rate": 1.1564536418208303e-06, + "loss": 0.5543, + "step": 14210 + }, + { + "epoch": 0.5864075266154989, + "grad_norm": 3.385991856845229, + "learning_rate": 1.1562584977067592e-06, + "loss": 0.5147, + "step": 14211 + }, + { + "epoch": 0.5864487909548568, + "grad_norm": 2.8478528857810557, + "learning_rate": 1.1560633597327714e-06, + "loss": 0.5257, + "step": 14212 + }, + { + "epoch": 0.5864900552942147, + "grad_norm": 2.993711826201391, + "learning_rate": 1.155868227902352e-06, + "loss": 0.5042, + "step": 14213 + }, + { + "epoch": 0.5865313196335726, + "grad_norm": 6.156819352581779, + "learning_rate": 1.1556731022189875e-06, + "loss": 0.4871, + "step": 14214 + }, + { + "epoch": 0.5865725839729306, + "grad_norm": 3.9025010228951893, + "learning_rate": 1.1554779826861623e-06, + "loss": 0.5516, + "step": 14215 + }, + { + "epoch": 0.5866138483122885, + "grad_norm": 2.212029347970219, + "learning_rate": 1.1552828693073626e-06, + "loss": 0.5255, + "step": 14216 + }, + { + "epoch": 0.5866551126516465, + "grad_norm": 2.233311025768698, + "learning_rate": 1.1550877620860728e-06, + "loss": 0.564, + "step": 14217 + }, + { + "epoch": 0.5866963769910044, + "grad_norm": 4.732160208778457, + "learning_rate": 1.1548926610257787e-06, + "loss": 0.5857, + "step": 14218 + }, + { + "epoch": 0.5867376413303623, + "grad_norm": 5.061698156862553, + "learning_rate": 1.154697566129965e-06, + "loss": 0.5063, + "step": 14219 + }, + { + "epoch": 0.5867789056697202, + "grad_norm": 2.0307681867508784, + "learning_rate": 1.1545024774021165e-06, + "loss": 0.4734, + "step": 14220 + }, + { + "epoch": 0.5868201700090782, + "grad_norm": 2.6548007889211833, + "learning_rate": 1.154307394845718e-06, + "loss": 0.4937, + "step": 14221 + }, + { + "epoch": 0.5868614343484361, + "grad_norm": 3.2707570632361573, + "learning_rate": 1.1541123184642542e-06, + "loss": 0.5655, + "step": 14222 + }, + { + "epoch": 0.586902698687794, + "grad_norm": 2.190697840174897, + "learning_rate": 1.1539172482612103e-06, + "loss": 0.461, + "step": 14223 + }, + { + "epoch": 0.5869439630271519, + "grad_norm": 2.6167338498138024, + "learning_rate": 1.1537221842400697e-06, + "loss": 0.5072, + "step": 14224 + }, + { + "epoch": 0.5869852273665098, + "grad_norm": 3.988868624150569, + "learning_rate": 1.1535271264043168e-06, + "loss": 0.5031, + "step": 14225 + }, + { + "epoch": 0.5870264917058677, + "grad_norm": 20.910804865397605, + "learning_rate": 1.1533320747574367e-06, + "loss": 0.5189, + "step": 14226 + }, + { + "epoch": 0.5870677560452258, + "grad_norm": 3.3080374074830345, + "learning_rate": 
1.1531370293029123e-06, + "loss": 0.5035, + "step": 14227 + }, + { + "epoch": 0.5871090203845837, + "grad_norm": 3.2898502338003066, + "learning_rate": 1.1529419900442287e-06, + "loss": 0.5343, + "step": 14228 + }, + { + "epoch": 0.5871502847239416, + "grad_norm": 2.359595821481072, + "learning_rate": 1.1527469569848692e-06, + "loss": 0.4983, + "step": 14229 + }, + { + "epoch": 0.5871915490632995, + "grad_norm": 3.787902946214817, + "learning_rate": 1.152551930128318e-06, + "loss": 0.5329, + "step": 14230 + }, + { + "epoch": 0.5872328134026574, + "grad_norm": 2.6499863907885888, + "learning_rate": 1.152356909478058e-06, + "loss": 0.5313, + "step": 14231 + }, + { + "epoch": 0.5872740777420153, + "grad_norm": 2.646254570628375, + "learning_rate": 1.1521618950375739e-06, + "loss": 0.5248, + "step": 14232 + }, + { + "epoch": 0.5873153420813733, + "grad_norm": 4.082257232000133, + "learning_rate": 1.151966886810348e-06, + "loss": 0.5267, + "step": 14233 + }, + { + "epoch": 0.5873566064207312, + "grad_norm": 2.8342031500782583, + "learning_rate": 1.1517718847998646e-06, + "loss": 0.5242, + "step": 14234 + }, + { + "epoch": 0.5873978707600891, + "grad_norm": 2.9230818266951664, + "learning_rate": 1.151576889009606e-06, + "loss": 0.4703, + "step": 14235 + }, + { + "epoch": 0.587439135099447, + "grad_norm": 5.713885812626561, + "learning_rate": 1.1513818994430562e-06, + "loss": 0.5708, + "step": 14236 + }, + { + "epoch": 0.587480399438805, + "grad_norm": 2.9058955183985544, + "learning_rate": 1.151186916103698e-06, + "loss": 0.5238, + "step": 14237 + }, + { + "epoch": 0.587521663778163, + "grad_norm": 3.3575384520641682, + "learning_rate": 1.1509919389950135e-06, + "loss": 0.5192, + "step": 14238 + }, + { + "epoch": 0.5875629281175209, + "grad_norm": 1.8840253300359027, + "learning_rate": 1.1507969681204865e-06, + "loss": 0.4849, + "step": 14239 + }, + { + "epoch": 0.5876041924568788, + "grad_norm": 4.2643955200511625, + "learning_rate": 1.1506020034835986e-06, + "loss": 0.4765, + "step": 14240 + }, + { + "epoch": 0.5876454567962367, + "grad_norm": 2.0290253306655535, + "learning_rate": 1.1504070450878336e-06, + "loss": 0.4545, + "step": 14241 + }, + { + "epoch": 0.5876867211355946, + "grad_norm": 4.585521161154801, + "learning_rate": 1.1502120929366732e-06, + "loss": 0.5323, + "step": 14242 + }, + { + "epoch": 0.5877279854749525, + "grad_norm": 3.0762047917744075, + "learning_rate": 1.1500171470335995e-06, + "loss": 0.4954, + "step": 14243 + }, + { + "epoch": 0.5877692498143104, + "grad_norm": 2.188385876296018, + "learning_rate": 1.1498222073820955e-06, + "loss": 0.4867, + "step": 14244 + }, + { + "epoch": 0.5878105141536684, + "grad_norm": 6.959912960949032, + "learning_rate": 1.1496272739856424e-06, + "loss": 0.4793, + "step": 14245 + }, + { + "epoch": 0.5878517784930263, + "grad_norm": 8.645082889382522, + "learning_rate": 1.149432346847723e-06, + "loss": 0.5236, + "step": 14246 + }, + { + "epoch": 0.5878930428323843, + "grad_norm": 2.625308371288044, + "learning_rate": 1.1492374259718188e-06, + "loss": 0.5239, + "step": 14247 + }, + { + "epoch": 0.5879343071717422, + "grad_norm": 3.802292126372121, + "learning_rate": 1.1490425113614119e-06, + "loss": 0.5155, + "step": 14248 + }, + { + "epoch": 0.5879755715111001, + "grad_norm": 3.662067736245009, + "learning_rate": 1.1488476030199833e-06, + "loss": 0.4945, + "step": 14249 + }, + { + "epoch": 0.588016835850458, + "grad_norm": 12.821477873263596, + "learning_rate": 1.1486527009510156e-06, + "loss": 0.464, + "step": 14250 + }, + { + "epoch": 
0.588058100189816, + "grad_norm": 2.551720472983957, + "learning_rate": 1.1484578051579893e-06, + "loss": 0.4899, + "step": 14251 + }, + { + "epoch": 0.5880993645291739, + "grad_norm": 2.3782662555553773, + "learning_rate": 1.1482629156443855e-06, + "loss": 0.5178, + "step": 14252 + }, + { + "epoch": 0.5881406288685318, + "grad_norm": 2.6262023578069855, + "learning_rate": 1.1480680324136866e-06, + "loss": 0.4966, + "step": 14253 + }, + { + "epoch": 0.5881818932078897, + "grad_norm": 4.113938090587464, + "learning_rate": 1.1478731554693725e-06, + "loss": 0.5024, + "step": 14254 + }, + { + "epoch": 0.5882231575472476, + "grad_norm": 1.8368378181651233, + "learning_rate": 1.1476782848149251e-06, + "loss": 0.4711, + "step": 14255 + }, + { + "epoch": 0.5882644218866055, + "grad_norm": 3.2430415639968753, + "learning_rate": 1.1474834204538246e-06, + "loss": 0.509, + "step": 14256 + }, + { + "epoch": 0.5883056862259636, + "grad_norm": 3.2195321324783137, + "learning_rate": 1.1472885623895524e-06, + "loss": 0.565, + "step": 14257 + }, + { + "epoch": 0.5883469505653215, + "grad_norm": 4.690423198693823, + "learning_rate": 1.1470937106255885e-06, + "loss": 0.5122, + "step": 14258 + }, + { + "epoch": 0.5883882149046794, + "grad_norm": 2.8081125405756966, + "learning_rate": 1.1468988651654139e-06, + "loss": 0.5352, + "step": 14259 + }, + { + "epoch": 0.5884294792440373, + "grad_norm": 2.3962173007989307, + "learning_rate": 1.146704026012509e-06, + "loss": 0.49, + "step": 14260 + }, + { + "epoch": 0.5884707435833952, + "grad_norm": 3.2718284822975527, + "learning_rate": 1.146509193170354e-06, + "loss": 0.4939, + "step": 14261 + }, + { + "epoch": 0.5885120079227532, + "grad_norm": 4.303111648971769, + "learning_rate": 1.1463143666424291e-06, + "loss": 0.5107, + "step": 14262 + }, + { + "epoch": 0.5885532722621111, + "grad_norm": 4.635193711193619, + "learning_rate": 1.1461195464322148e-06, + "loss": 0.6029, + "step": 14263 + }, + { + "epoch": 0.588594536601469, + "grad_norm": 2.393066394098728, + "learning_rate": 1.1459247325431902e-06, + "loss": 0.5185, + "step": 14264 + }, + { + "epoch": 0.5886358009408269, + "grad_norm": 3.1397079135819856, + "learning_rate": 1.1457299249788357e-06, + "loss": 0.5195, + "step": 14265 + }, + { + "epoch": 0.5886770652801848, + "grad_norm": 23.361025071714618, + "learning_rate": 1.1455351237426308e-06, + "loss": 0.4962, + "step": 14266 + }, + { + "epoch": 0.5887183296195428, + "grad_norm": 6.407096809467917, + "learning_rate": 1.1453403288380557e-06, + "loss": 0.495, + "step": 14267 + }, + { + "epoch": 0.5887595939589008, + "grad_norm": 4.193746082992621, + "learning_rate": 1.145145540268589e-06, + "loss": 0.5394, + "step": 14268 + }, + { + "epoch": 0.5888008582982587, + "grad_norm": 2.377518763986594, + "learning_rate": 1.1449507580377111e-06, + "loss": 0.496, + "step": 14269 + }, + { + "epoch": 0.5888421226376166, + "grad_norm": 3.6964652397363116, + "learning_rate": 1.1447559821489006e-06, + "loss": 0.5586, + "step": 14270 + }, + { + "epoch": 0.5888833869769745, + "grad_norm": 2.721506820457588, + "learning_rate": 1.1445612126056372e-06, + "loss": 0.519, + "step": 14271 + }, + { + "epoch": 0.5889246513163324, + "grad_norm": 2.177363902915791, + "learning_rate": 1.1443664494113996e-06, + "loss": 0.51, + "step": 14272 + }, + { + "epoch": 0.5889659156556903, + "grad_norm": 4.10484530070993, + "learning_rate": 1.1441716925696667e-06, + "loss": 0.6261, + "step": 14273 + }, + { + "epoch": 0.5890071799950483, + "grad_norm": 3.1114076538349256, + "learning_rate": 
1.1439769420839179e-06, + "loss": 0.5876, + "step": 14274 + }, + { + "epoch": 0.5890484443344062, + "grad_norm": 2.099889708793312, + "learning_rate": 1.1437821979576312e-06, + "loss": 0.5001, + "step": 14275 + }, + { + "epoch": 0.5890897086737641, + "grad_norm": 2.691380799526984, + "learning_rate": 1.1435874601942863e-06, + "loss": 0.531, + "step": 14276 + }, + { + "epoch": 0.589130973013122, + "grad_norm": 2.7620304969913123, + "learning_rate": 1.1433927287973602e-06, + "loss": 0.5076, + "step": 14277 + }, + { + "epoch": 0.58917223735248, + "grad_norm": 3.179101218195407, + "learning_rate": 1.1431980037703324e-06, + "loss": 0.58, + "step": 14278 + }, + { + "epoch": 0.589213501691838, + "grad_norm": 2.8638463463871275, + "learning_rate": 1.1430032851166803e-06, + "loss": 0.5188, + "step": 14279 + }, + { + "epoch": 0.5892547660311959, + "grad_norm": 1.9483700396763184, + "learning_rate": 1.1428085728398832e-06, + "loss": 0.4899, + "step": 14280 + }, + { + "epoch": 0.5892960303705538, + "grad_norm": 4.350478339044412, + "learning_rate": 1.1426138669434185e-06, + "loss": 0.5187, + "step": 14281 + }, + { + "epoch": 0.5893372947099117, + "grad_norm": 7.70282588550499, + "learning_rate": 1.1424191674307637e-06, + "loss": 0.5428, + "step": 14282 + }, + { + "epoch": 0.5893785590492696, + "grad_norm": 1.9457560863169776, + "learning_rate": 1.1422244743053976e-06, + "loss": 0.5078, + "step": 14283 + }, + { + "epoch": 0.5894198233886275, + "grad_norm": 2.804602218894962, + "learning_rate": 1.1420297875707969e-06, + "loss": 0.551, + "step": 14284 + }, + { + "epoch": 0.5894610877279854, + "grad_norm": 4.564455061892592, + "learning_rate": 1.1418351072304402e-06, + "loss": 0.4983, + "step": 14285 + }, + { + "epoch": 0.5895023520673434, + "grad_norm": 2.1921831929435935, + "learning_rate": 1.141640433287804e-06, + "loss": 0.5159, + "step": 14286 + }, + { + "epoch": 0.5895436164067013, + "grad_norm": 2.0419552413047692, + "learning_rate": 1.1414457657463666e-06, + "loss": 0.5256, + "step": 14287 + }, + { + "epoch": 0.5895848807460593, + "grad_norm": 1.8553739712512831, + "learning_rate": 1.1412511046096044e-06, + "loss": 0.5395, + "step": 14288 + }, + { + "epoch": 0.5896261450854172, + "grad_norm": 2.412777991712902, + "learning_rate": 1.1410564498809956e-06, + "loss": 0.5005, + "step": 14289 + }, + { + "epoch": 0.5896674094247751, + "grad_norm": 2.471273275731334, + "learning_rate": 1.1408618015640164e-06, + "loss": 0.4782, + "step": 14290 + }, + { + "epoch": 0.589708673764133, + "grad_norm": 4.870378918228328, + "learning_rate": 1.1406671596621432e-06, + "loss": 0.5037, + "step": 14291 + }, + { + "epoch": 0.589749938103491, + "grad_norm": 3.357029086423977, + "learning_rate": 1.140472524178854e-06, + "loss": 0.5146, + "step": 14292 + }, + { + "epoch": 0.5897912024428489, + "grad_norm": 2.4634969456072175, + "learning_rate": 1.1402778951176244e-06, + "loss": 0.5457, + "step": 14293 + }, + { + "epoch": 0.5898324667822068, + "grad_norm": 2.900934158318415, + "learning_rate": 1.1400832724819319e-06, + "loss": 0.5215, + "step": 14294 + }, + { + "epoch": 0.5898737311215647, + "grad_norm": 7.795942278875975, + "learning_rate": 1.1398886562752523e-06, + "loss": 0.5192, + "step": 14295 + }, + { + "epoch": 0.5899149954609226, + "grad_norm": 4.632826663278168, + "learning_rate": 1.1396940465010622e-06, + "loss": 0.4852, + "step": 14296 + }, + { + "epoch": 0.5899562598002805, + "grad_norm": 3.3414683550001865, + "learning_rate": 1.1394994431628377e-06, + "loss": 0.5416, + "step": 14297 + }, + { + "epoch": 
0.5899975241396386, + "grad_norm": 2.678596084757328, + "learning_rate": 1.1393048462640552e-06, + "loss": 0.5456, + "step": 14298 + }, + { + "epoch": 0.5900387884789965, + "grad_norm": 4.752895659464796, + "learning_rate": 1.1391102558081902e-06, + "loss": 0.5345, + "step": 14299 + }, + { + "epoch": 0.5900800528183544, + "grad_norm": 2.930729474798925, + "learning_rate": 1.138915671798719e-06, + "loss": 0.4838, + "step": 14300 + }, + { + "epoch": 0.5901213171577123, + "grad_norm": 2.687259353845629, + "learning_rate": 1.1387210942391175e-06, + "loss": 0.5225, + "step": 14301 + }, + { + "epoch": 0.5901625814970702, + "grad_norm": 4.123900024024115, + "learning_rate": 1.1385265231328613e-06, + "loss": 0.5025, + "step": 14302 + }, + { + "epoch": 0.5902038458364282, + "grad_norm": 4.985259761875081, + "learning_rate": 1.1383319584834246e-06, + "loss": 0.5635, + "step": 14303 + }, + { + "epoch": 0.5902451101757861, + "grad_norm": 3.349641040522155, + "learning_rate": 1.1381374002942845e-06, + "loss": 0.4639, + "step": 14304 + }, + { + "epoch": 0.590286374515144, + "grad_norm": 5.052030334912823, + "learning_rate": 1.1379428485689155e-06, + "loss": 0.5149, + "step": 14305 + }, + { + "epoch": 0.5903276388545019, + "grad_norm": 1.8326131903786285, + "learning_rate": 1.1377483033107932e-06, + "loss": 0.538, + "step": 14306 + }, + { + "epoch": 0.5903689031938598, + "grad_norm": 2.4187142912084356, + "learning_rate": 1.137553764523392e-06, + "loss": 0.558, + "step": 14307 + }, + { + "epoch": 0.5904101675332178, + "grad_norm": 4.6174578315650345, + "learning_rate": 1.1373592322101877e-06, + "loss": 0.5694, + "step": 14308 + }, + { + "epoch": 0.5904514318725758, + "grad_norm": 2.570910866139488, + "learning_rate": 1.1371647063746543e-06, + "loss": 0.5146, + "step": 14309 + }, + { + "epoch": 0.5904926962119337, + "grad_norm": 2.6485787299803256, + "learning_rate": 1.1369701870202672e-06, + "loss": 0.5447, + "step": 14310 + }, + { + "epoch": 0.5905339605512916, + "grad_norm": 7.568367304903581, + "learning_rate": 1.1367756741505005e-06, + "loss": 0.561, + "step": 14311 + }, + { + "epoch": 0.5905752248906495, + "grad_norm": 2.997661455694931, + "learning_rate": 1.1365811677688292e-06, + "loss": 0.5521, + "step": 14312 + }, + { + "epoch": 0.5906164892300074, + "grad_norm": 2.9099067426508167, + "learning_rate": 1.136386667878727e-06, + "loss": 0.4621, + "step": 14313 + }, + { + "epoch": 0.5906577535693653, + "grad_norm": 3.477753630460374, + "learning_rate": 1.136192174483669e-06, + "loss": 0.5146, + "step": 14314 + }, + { + "epoch": 0.5906990179087233, + "grad_norm": 5.274270065234146, + "learning_rate": 1.135997687587129e-06, + "loss": 0.5308, + "step": 14315 + }, + { + "epoch": 0.5907402822480812, + "grad_norm": 3.7860261275571006, + "learning_rate": 1.1358032071925802e-06, + "loss": 0.5242, + "step": 14316 + }, + { + "epoch": 0.5907815465874391, + "grad_norm": 3.0251318261572355, + "learning_rate": 1.1356087333034978e-06, + "loss": 0.5373, + "step": 14317 + }, + { + "epoch": 0.5908228109267971, + "grad_norm": 3.03801378281969, + "learning_rate": 1.1354142659233546e-06, + "loss": 0.4761, + "step": 14318 + }, + { + "epoch": 0.590864075266155, + "grad_norm": 3.5836376529063383, + "learning_rate": 1.1352198050556248e-06, + "loss": 0.5516, + "step": 14319 + }, + { + "epoch": 0.590905339605513, + "grad_norm": 5.419441595252616, + "learning_rate": 1.1350253507037819e-06, + "loss": 0.5525, + "step": 14320 + }, + { + "epoch": 0.5909466039448709, + "grad_norm": 3.9288896040032752, + "learning_rate": 
1.134830902871299e-06, + "loss": 0.4957, + "step": 14321 + }, + { + "epoch": 0.5909878682842288, + "grad_norm": 3.0091232103752112, + "learning_rate": 1.1346364615616499e-06, + "loss": 0.5913, + "step": 14322 + }, + { + "epoch": 0.5910291326235867, + "grad_norm": 2.5274317514318514, + "learning_rate": 1.1344420267783075e-06, + "loss": 0.5436, + "step": 14323 + }, + { + "epoch": 0.5910703969629446, + "grad_norm": 2.318446211336417, + "learning_rate": 1.1342475985247451e-06, + "loss": 0.5099, + "step": 14324 + }, + { + "epoch": 0.5911116613023025, + "grad_norm": 3.9281087482838615, + "learning_rate": 1.1340531768044353e-06, + "loss": 0.4882, + "step": 14325 + }, + { + "epoch": 0.5911529256416604, + "grad_norm": 2.9450010483013855, + "learning_rate": 1.1338587616208515e-06, + "loss": 0.5427, + "step": 14326 + }, + { + "epoch": 0.5911941899810184, + "grad_norm": 2.5964855780671217, + "learning_rate": 1.133664352977466e-06, + "loss": 0.5145, + "step": 14327 + }, + { + "epoch": 0.5912354543203764, + "grad_norm": 4.956376403232521, + "learning_rate": 1.1334699508777515e-06, + "loss": 0.5362, + "step": 14328 + }, + { + "epoch": 0.5912767186597343, + "grad_norm": 2.103261014242275, + "learning_rate": 1.1332755553251814e-06, + "loss": 0.4999, + "step": 14329 + }, + { + "epoch": 0.5913179829990922, + "grad_norm": 2.0175560505059336, + "learning_rate": 1.1330811663232266e-06, + "loss": 0.5474, + "step": 14330 + }, + { + "epoch": 0.5913592473384501, + "grad_norm": 3.3386725317976644, + "learning_rate": 1.13288678387536e-06, + "loss": 0.4661, + "step": 14331 + }, + { + "epoch": 0.591400511677808, + "grad_norm": 2.645528026900661, + "learning_rate": 1.1326924079850539e-06, + "loss": 0.4975, + "step": 14332 + }, + { + "epoch": 0.591441776017166, + "grad_norm": 2.860138224642496, + "learning_rate": 1.1324980386557804e-06, + "loss": 0.5563, + "step": 14333 + }, + { + "epoch": 0.5914830403565239, + "grad_norm": 2.186283221186762, + "learning_rate": 1.1323036758910107e-06, + "loss": 0.4741, + "step": 14334 + }, + { + "epoch": 0.5915243046958818, + "grad_norm": 1.8898423657828152, + "learning_rate": 1.132109319694218e-06, + "loss": 0.5043, + "step": 14335 + }, + { + "epoch": 0.5915655690352397, + "grad_norm": 2.701142787107899, + "learning_rate": 1.1319149700688727e-06, + "loss": 0.5052, + "step": 14336 + }, + { + "epoch": 0.5916068333745976, + "grad_norm": 2.569915980294612, + "learning_rate": 1.131720627018447e-06, + "loss": 0.5051, + "step": 14337 + }, + { + "epoch": 0.5916480977139555, + "grad_norm": 1.9087055384104057, + "learning_rate": 1.1315262905464118e-06, + "loss": 0.537, + "step": 14338 + }, + { + "epoch": 0.5916893620533136, + "grad_norm": 9.225075467529848, + "learning_rate": 1.1313319606562396e-06, + "loss": 0.5586, + "step": 14339 + }, + { + "epoch": 0.5917306263926715, + "grad_norm": 4.42304408259736, + "learning_rate": 1.1311376373514006e-06, + "loss": 0.5402, + "step": 14340 + }, + { + "epoch": 0.5917718907320294, + "grad_norm": 1.9886983412479313, + "learning_rate": 1.1309433206353656e-06, + "loss": 0.5373, + "step": 14341 + }, + { + "epoch": 0.5918131550713873, + "grad_norm": 2.0423795243762397, + "learning_rate": 1.130749010511607e-06, + "loss": 0.4822, + "step": 14342 + }, + { + "epoch": 0.5918544194107452, + "grad_norm": 2.3076277582355083, + "learning_rate": 1.1305547069835944e-06, + "loss": 0.45, + "step": 14343 + }, + { + "epoch": 0.5918956837501032, + "grad_norm": 2.262772289485849, + "learning_rate": 1.1303604100547986e-06, + "loss": 0.4977, + "step": 14344 + }, + { + "epoch": 
0.5919369480894611, + "grad_norm": 5.518489661828144, + "learning_rate": 1.130166119728691e-06, + "loss": 0.5449, + "step": 14345 + }, + { + "epoch": 0.591978212428819, + "grad_norm": 3.9973733676283754, + "learning_rate": 1.1299718360087413e-06, + "loss": 0.541, + "step": 14346 + }, + { + "epoch": 0.5920194767681769, + "grad_norm": 4.261501204831613, + "learning_rate": 1.1297775588984206e-06, + "loss": 0.5132, + "step": 14347 + }, + { + "epoch": 0.5920607411075348, + "grad_norm": 4.420727912318369, + "learning_rate": 1.1295832884011987e-06, + "loss": 0.5018, + "step": 14348 + }, + { + "epoch": 0.5921020054468928, + "grad_norm": 2.2747760698701778, + "learning_rate": 1.129389024520546e-06, + "loss": 0.5002, + "step": 14349 + }, + { + "epoch": 0.5921432697862508, + "grad_norm": 12.50064811742378, + "learning_rate": 1.1291947672599323e-06, + "loss": 0.5423, + "step": 14350 + }, + { + "epoch": 0.5921845341256087, + "grad_norm": 5.614707637849574, + "learning_rate": 1.1290005166228279e-06, + "loss": 0.5052, + "step": 14351 + }, + { + "epoch": 0.5922257984649666, + "grad_norm": 2.6147028897518623, + "learning_rate": 1.1288062726127021e-06, + "loss": 0.5651, + "step": 14352 + }, + { + "epoch": 0.5922670628043245, + "grad_norm": 1.9190622729200202, + "learning_rate": 1.128612035233025e-06, + "loss": 0.5359, + "step": 14353 + }, + { + "epoch": 0.5923083271436824, + "grad_norm": 2.5753940405001, + "learning_rate": 1.128417804487266e-06, + "loss": 0.5459, + "step": 14354 + }, + { + "epoch": 0.5923495914830403, + "grad_norm": 2.952456887476394, + "learning_rate": 1.1282235803788948e-06, + "loss": 0.5812, + "step": 14355 + }, + { + "epoch": 0.5923908558223983, + "grad_norm": 3.2782711805464206, + "learning_rate": 1.1280293629113806e-06, + "loss": 0.5834, + "step": 14356 + }, + { + "epoch": 0.5924321201617562, + "grad_norm": 2.331412851301993, + "learning_rate": 1.127835152088192e-06, + "loss": 0.5348, + "step": 14357 + }, + { + "epoch": 0.5924733845011141, + "grad_norm": 2.1939273663826193, + "learning_rate": 1.1276409479127987e-06, + "loss": 0.5258, + "step": 14358 + }, + { + "epoch": 0.5925146488404721, + "grad_norm": 2.1409923379080924, + "learning_rate": 1.12744675038867e-06, + "loss": 0.4902, + "step": 14359 + }, + { + "epoch": 0.59255591317983, + "grad_norm": 3.7447444975352404, + "learning_rate": 1.1272525595192737e-06, + "loss": 0.5341, + "step": 14360 + }, + { + "epoch": 0.592597177519188, + "grad_norm": 5.362844692132174, + "learning_rate": 1.1270583753080795e-06, + "loss": 0.5439, + "step": 14361 + }, + { + "epoch": 0.5926384418585459, + "grad_norm": 3.3834117249136044, + "learning_rate": 1.1268641977585555e-06, + "loss": 0.527, + "step": 14362 + }, + { + "epoch": 0.5926797061979038, + "grad_norm": 2.757440951455428, + "learning_rate": 1.1266700268741705e-06, + "loss": 0.4715, + "step": 14363 + }, + { + "epoch": 0.5927209705372617, + "grad_norm": 3.0809598717935383, + "learning_rate": 1.1264758626583926e-06, + "loss": 0.5387, + "step": 14364 + }, + { + "epoch": 0.5927622348766196, + "grad_norm": 2.1092307166476463, + "learning_rate": 1.1262817051146905e-06, + "loss": 0.4805, + "step": 14365 + }, + { + "epoch": 0.5928034992159775, + "grad_norm": 4.799531337285198, + "learning_rate": 1.1260875542465318e-06, + "loss": 0.5649, + "step": 14366 + }, + { + "epoch": 0.5928447635553354, + "grad_norm": 5.516928307218583, + "learning_rate": 1.125893410057385e-06, + "loss": 0.5336, + "step": 14367 + }, + { + "epoch": 0.5928860278946934, + "grad_norm": 3.2711769054037476, + "learning_rate": 
1.125699272550718e-06, + "loss": 0.5162, + "step": 14368 + }, + { + "epoch": 0.5929272922340514, + "grad_norm": 4.022734770976552, + "learning_rate": 1.1255051417299977e-06, + "loss": 0.5414, + "step": 14369 + }, + { + "epoch": 0.5929685565734093, + "grad_norm": 1.9904011145913212, + "learning_rate": 1.125311017598693e-06, + "loss": 0.5148, + "step": 14370 + }, + { + "epoch": 0.5930098209127672, + "grad_norm": 3.9196481438690007, + "learning_rate": 1.1251169001602702e-06, + "loss": 0.5504, + "step": 14371 + }, + { + "epoch": 0.5930510852521251, + "grad_norm": 3.7898463359615637, + "learning_rate": 1.124922789418198e-06, + "loss": 0.4766, + "step": 14372 + }, + { + "epoch": 0.593092349591483, + "grad_norm": 2.4972308559245415, + "learning_rate": 1.1247286853759424e-06, + "loss": 0.5338, + "step": 14373 + }, + { + "epoch": 0.593133613930841, + "grad_norm": 2.0029895213364246, + "learning_rate": 1.1245345880369718e-06, + "loss": 0.5174, + "step": 14374 + }, + { + "epoch": 0.5931748782701989, + "grad_norm": 2.3925881780535625, + "learning_rate": 1.1243404974047524e-06, + "loss": 0.4963, + "step": 14375 + }, + { + "epoch": 0.5932161426095568, + "grad_norm": 2.251562856024498, + "learning_rate": 1.124146413482752e-06, + "loss": 0.5392, + "step": 14376 + }, + { + "epoch": 0.5932574069489147, + "grad_norm": 2.6570121840929093, + "learning_rate": 1.123952336274436e-06, + "loss": 0.506, + "step": 14377 + }, + { + "epoch": 0.5932986712882726, + "grad_norm": 2.3538583549905554, + "learning_rate": 1.1237582657832728e-06, + "loss": 0.4783, + "step": 14378 + }, + { + "epoch": 0.5933399356276307, + "grad_norm": 17.351992363522985, + "learning_rate": 1.123564202012728e-06, + "loss": 0.4984, + "step": 14379 + }, + { + "epoch": 0.5933811999669886, + "grad_norm": 2.4083173525242163, + "learning_rate": 1.1233701449662681e-06, + "loss": 0.4916, + "step": 14380 + }, + { + "epoch": 0.5934224643063465, + "grad_norm": 2.374519952833266, + "learning_rate": 1.1231760946473602e-06, + "loss": 0.5013, + "step": 14381 + }, + { + "epoch": 0.5934637286457044, + "grad_norm": 3.326612863596463, + "learning_rate": 1.1229820510594696e-06, + "loss": 0.529, + "step": 14382 + }, + { + "epoch": 0.5935049929850623, + "grad_norm": 4.764625469255807, + "learning_rate": 1.1227880142060622e-06, + "loss": 0.5559, + "step": 14383 + }, + { + "epoch": 0.5935462573244202, + "grad_norm": 5.121725233016766, + "learning_rate": 1.122593984090605e-06, + "loss": 0.5278, + "step": 14384 + }, + { + "epoch": 0.5935875216637782, + "grad_norm": 4.797322854004533, + "learning_rate": 1.122399960716563e-06, + "loss": 0.5654, + "step": 14385 + }, + { + "epoch": 0.5936287860031361, + "grad_norm": 9.507203906197494, + "learning_rate": 1.1222059440874026e-06, + "loss": 0.5692, + "step": 14386 + }, + { + "epoch": 0.593670050342494, + "grad_norm": 2.5955802122734672, + "learning_rate": 1.122011934206589e-06, + "loss": 0.4871, + "step": 14387 + }, + { + "epoch": 0.5937113146818519, + "grad_norm": 2.1793077828743064, + "learning_rate": 1.121817931077588e-06, + "loss": 0.4899, + "step": 14388 + }, + { + "epoch": 0.5937525790212099, + "grad_norm": 3.082827446399023, + "learning_rate": 1.1216239347038645e-06, + "loss": 0.5037, + "step": 14389 + }, + { + "epoch": 0.5937938433605678, + "grad_norm": 2.117933803232897, + "learning_rate": 1.1214299450888843e-06, + "loss": 0.5295, + "step": 14390 + }, + { + "epoch": 0.5938351076999258, + "grad_norm": 2.6436092954974053, + "learning_rate": 1.121235962236112e-06, + "loss": 0.5041, + "step": 14391 + }, + { + "epoch": 
0.5938763720392837, + "grad_norm": 1.6950390705915024, + "learning_rate": 1.1210419861490136e-06, + "loss": 0.4658, + "step": 14392 + }, + { + "epoch": 0.5939176363786416, + "grad_norm": 3.4118749331565317, + "learning_rate": 1.1208480168310529e-06, + "loss": 0.5065, + "step": 14393 + }, + { + "epoch": 0.5939589007179995, + "grad_norm": 2.7973773469297742, + "learning_rate": 1.1206540542856954e-06, + "loss": 0.5405, + "step": 14394 + }, + { + "epoch": 0.5940001650573574, + "grad_norm": 3.3376782791341135, + "learning_rate": 1.1204600985164055e-06, + "loss": 0.502, + "step": 14395 + }, + { + "epoch": 0.5940414293967153, + "grad_norm": 3.0955153905438686, + "learning_rate": 1.1202661495266472e-06, + "loss": 0.5694, + "step": 14396 + }, + { + "epoch": 0.5940826937360733, + "grad_norm": 5.365768335491766, + "learning_rate": 1.120072207319886e-06, + "loss": 0.5542, + "step": 14397 + }, + { + "epoch": 0.5941239580754312, + "grad_norm": 4.046076061090656, + "learning_rate": 1.1198782718995851e-06, + "loss": 0.5133, + "step": 14398 + }, + { + "epoch": 0.5941652224147891, + "grad_norm": 2.2094932749302174, + "learning_rate": 1.1196843432692094e-06, + "loss": 0.5771, + "step": 14399 + }, + { + "epoch": 0.5942064867541471, + "grad_norm": 3.1771449253367394, + "learning_rate": 1.1194904214322226e-06, + "loss": 0.5096, + "step": 14400 + }, + { + "epoch": 0.594247751093505, + "grad_norm": 3.4981046777082034, + "learning_rate": 1.1192965063920886e-06, + "loss": 0.582, + "step": 14401 + }, + { + "epoch": 0.5942890154328629, + "grad_norm": 4.350001542220642, + "learning_rate": 1.119102598152272e-06, + "loss": 0.5475, + "step": 14402 + }, + { + "epoch": 0.5943302797722209, + "grad_norm": 2.411627961618536, + "learning_rate": 1.1189086967162352e-06, + "loss": 0.572, + "step": 14403 + }, + { + "epoch": 0.5943715441115788, + "grad_norm": 5.727557109025586, + "learning_rate": 1.118714802087443e-06, + "loss": 0.4207, + "step": 14404 + }, + { + "epoch": 0.5944128084509367, + "grad_norm": 1.8937986497918962, + "learning_rate": 1.1185209142693578e-06, + "loss": 0.513, + "step": 14405 + }, + { + "epoch": 0.5944540727902946, + "grad_norm": 2.393181592519057, + "learning_rate": 1.1183270332654437e-06, + "loss": 0.4852, + "step": 14406 + }, + { + "epoch": 0.5944953371296525, + "grad_norm": 15.762453105880017, + "learning_rate": 1.118133159079164e-06, + "loss": 0.5011, + "step": 14407 + }, + { + "epoch": 0.5945366014690104, + "grad_norm": 3.2081109882631025, + "learning_rate": 1.1179392917139804e-06, + "loss": 0.4786, + "step": 14408 + }, + { + "epoch": 0.5945778658083684, + "grad_norm": 2.6005441101380824, + "learning_rate": 1.1177454311733574e-06, + "loss": 0.4946, + "step": 14409 + }, + { + "epoch": 0.5946191301477264, + "grad_norm": 4.07101638687564, + "learning_rate": 1.1175515774607568e-06, + "loss": 0.4493, + "step": 14410 + }, + { + "epoch": 0.5946603944870843, + "grad_norm": 7.621188065031291, + "learning_rate": 1.1173577305796423e-06, + "loss": 0.4597, + "step": 14411 + }, + { + "epoch": 0.5947016588264422, + "grad_norm": 3.532016474872548, + "learning_rate": 1.1171638905334755e-06, + "loss": 0.5493, + "step": 14412 + }, + { + "epoch": 0.5947429231658001, + "grad_norm": 2.7050283769533654, + "learning_rate": 1.1169700573257195e-06, + "loss": 0.5169, + "step": 14413 + }, + { + "epoch": 0.594784187505158, + "grad_norm": 2.403895852871886, + "learning_rate": 1.1167762309598366e-06, + "loss": 0.4946, + "step": 14414 + }, + { + "epoch": 0.594825451844516, + "grad_norm": 4.197407720567727, + "learning_rate": 
1.1165824114392887e-06, + "loss": 0.53, + "step": 14415 + }, + { + "epoch": 0.5948667161838739, + "grad_norm": 11.078330243360833, + "learning_rate": 1.1163885987675382e-06, + "loss": 0.5248, + "step": 14416 + }, + { + "epoch": 0.5949079805232318, + "grad_norm": 3.41552078993474, + "learning_rate": 1.116194792948047e-06, + "loss": 0.4791, + "step": 14417 + }, + { + "epoch": 0.5949492448625897, + "grad_norm": 7.699508513985924, + "learning_rate": 1.116000993984277e-06, + "loss": 0.5401, + "step": 14418 + }, + { + "epoch": 0.5949905092019476, + "grad_norm": 3.993647504372412, + "learning_rate": 1.1158072018796896e-06, + "loss": 0.5772, + "step": 14419 + }, + { + "epoch": 0.5950317735413057, + "grad_norm": 2.300902403558796, + "learning_rate": 1.115613416637747e-06, + "loss": 0.5398, + "step": 14420 + }, + { + "epoch": 0.5950730378806636, + "grad_norm": 2.745165657028725, + "learning_rate": 1.1154196382619107e-06, + "loss": 0.5283, + "step": 14421 + }, + { + "epoch": 0.5951143022200215, + "grad_norm": 4.314679001329561, + "learning_rate": 1.1152258667556408e-06, + "loss": 0.5303, + "step": 14422 + }, + { + "epoch": 0.5951555665593794, + "grad_norm": 6.343323017551191, + "learning_rate": 1.1150321021224002e-06, + "loss": 0.5112, + "step": 14423 + }, + { + "epoch": 0.5951968308987373, + "grad_norm": 2.520519277128045, + "learning_rate": 1.1148383443656487e-06, + "loss": 0.5755, + "step": 14424 + }, + { + "epoch": 0.5952380952380952, + "grad_norm": 2.141455909704527, + "learning_rate": 1.1146445934888483e-06, + "loss": 0.5084, + "step": 14425 + }, + { + "epoch": 0.5952793595774531, + "grad_norm": 2.430176947488887, + "learning_rate": 1.1144508494954594e-06, + "loss": 0.5448, + "step": 14426 + }, + { + "epoch": 0.5953206239168111, + "grad_norm": 23.25772241733347, + "learning_rate": 1.1142571123889428e-06, + "loss": 0.5432, + "step": 14427 + }, + { + "epoch": 0.595361888256169, + "grad_norm": 3.5322115329031716, + "learning_rate": 1.1140633821727591e-06, + "loss": 0.5234, + "step": 14428 + }, + { + "epoch": 0.5954031525955269, + "grad_norm": 4.3235480218186275, + "learning_rate": 1.113869658850369e-06, + "loss": 0.4933, + "step": 14429 + }, + { + "epoch": 0.5954444169348849, + "grad_norm": 2.232881925149184, + "learning_rate": 1.1136759424252326e-06, + "loss": 0.543, + "step": 14430 + }, + { + "epoch": 0.5954856812742428, + "grad_norm": 6.949015425687305, + "learning_rate": 1.1134822329008104e-06, + "loss": 0.568, + "step": 14431 + }, + { + "epoch": 0.5955269456136008, + "grad_norm": 2.389837914857456, + "learning_rate": 1.1132885302805622e-06, + "loss": 0.4836, + "step": 14432 + }, + { + "epoch": 0.5955682099529587, + "grad_norm": 3.0135554046731667, + "learning_rate": 1.1130948345679486e-06, + "loss": 0.542, + "step": 14433 + }, + { + "epoch": 0.5956094742923166, + "grad_norm": 3.5778035024606765, + "learning_rate": 1.1129011457664295e-06, + "loss": 0.5405, + "step": 14434 + }, + { + "epoch": 0.5956507386316745, + "grad_norm": 1.8284179647404075, + "learning_rate": 1.1127074638794635e-06, + "loss": 0.5001, + "step": 14435 + }, + { + "epoch": 0.5956920029710324, + "grad_norm": 3.0565823752881727, + "learning_rate": 1.1125137889105115e-06, + "loss": 0.5544, + "step": 14436 + }, + { + "epoch": 0.5957332673103903, + "grad_norm": 4.012912699935339, + "learning_rate": 1.1123201208630326e-06, + "loss": 0.4714, + "step": 14437 + }, + { + "epoch": 0.5957745316497483, + "grad_norm": 3.2580895754869235, + "learning_rate": 1.1121264597404858e-06, + "loss": 0.505, + "step": 14438 + }, + { + "epoch": 
0.5958157959891062, + "grad_norm": 2.0220226526602088, + "learning_rate": 1.1119328055463312e-06, + "loss": 0.5146, + "step": 14439 + }, + { + "epoch": 0.5958570603284642, + "grad_norm": 3.4446584655932386, + "learning_rate": 1.111739158284027e-06, + "loss": 0.6002, + "step": 14440 + }, + { + "epoch": 0.5958983246678221, + "grad_norm": 2.7171531238274067, + "learning_rate": 1.111545517957033e-06, + "loss": 0.5241, + "step": 14441 + }, + { + "epoch": 0.59593958900718, + "grad_norm": 2.026913274938948, + "learning_rate": 1.1113518845688076e-06, + "loss": 0.5215, + "step": 14442 + }, + { + "epoch": 0.5959808533465379, + "grad_norm": 2.1478264953721777, + "learning_rate": 1.11115825812281e-06, + "loss": 0.5397, + "step": 14443 + }, + { + "epoch": 0.5960221176858959, + "grad_norm": 3.9454857018860894, + "learning_rate": 1.1109646386224984e-06, + "loss": 0.5468, + "step": 14444 + }, + { + "epoch": 0.5960633820252538, + "grad_norm": 1.896066832848716, + "learning_rate": 1.110771026071332e-06, + "loss": 0.5107, + "step": 14445 + }, + { + "epoch": 0.5961046463646117, + "grad_norm": 3.6935715140259693, + "learning_rate": 1.1105774204727683e-06, + "loss": 0.491, + "step": 14446 + }, + { + "epoch": 0.5961459107039696, + "grad_norm": 9.082527721429724, + "learning_rate": 1.1103838218302668e-06, + "loss": 0.5235, + "step": 14447 + }, + { + "epoch": 0.5961871750433275, + "grad_norm": 2.4780346856701754, + "learning_rate": 1.1101902301472845e-06, + "loss": 0.565, + "step": 14448 + }, + { + "epoch": 0.5962284393826854, + "grad_norm": 8.926648578944363, + "learning_rate": 1.1099966454272796e-06, + "loss": 0.5006, + "step": 14449 + }, + { + "epoch": 0.5962697037220435, + "grad_norm": 3.247210667174165, + "learning_rate": 1.1098030676737105e-06, + "loss": 0.4997, + "step": 14450 + }, + { + "epoch": 0.5963109680614014, + "grad_norm": 5.585592301778083, + "learning_rate": 1.1096094968900343e-06, + "loss": 0.5258, + "step": 14451 + }, + { + "epoch": 0.5963522324007593, + "grad_norm": 2.169545819654116, + "learning_rate": 1.1094159330797096e-06, + "loss": 0.5144, + "step": 14452 + }, + { + "epoch": 0.5963934967401172, + "grad_norm": 2.4902851305453972, + "learning_rate": 1.1092223762461933e-06, + "loss": 0.512, + "step": 14453 + }, + { + "epoch": 0.5964347610794751, + "grad_norm": 2.7502909146287875, + "learning_rate": 1.1090288263929429e-06, + "loss": 0.4963, + "step": 14454 + }, + { + "epoch": 0.596476025418833, + "grad_norm": 3.1627858759619376, + "learning_rate": 1.1088352835234154e-06, + "loss": 0.5607, + "step": 14455 + }, + { + "epoch": 0.596517289758191, + "grad_norm": 2.5879099763742657, + "learning_rate": 1.108641747641069e-06, + "loss": 0.4996, + "step": 14456 + }, + { + "epoch": 0.5965585540975489, + "grad_norm": 3.7366422408775817, + "learning_rate": 1.1084482187493595e-06, + "loss": 0.5242, + "step": 14457 + }, + { + "epoch": 0.5965998184369068, + "grad_norm": 2.7458891583371523, + "learning_rate": 1.1082546968517444e-06, + "loss": 0.5023, + "step": 14458 + }, + { + "epoch": 0.5966410827762647, + "grad_norm": 2.2749705679292953, + "learning_rate": 1.1080611819516804e-06, + "loss": 0.5776, + "step": 14459 + }, + { + "epoch": 0.5966823471156226, + "grad_norm": 3.9009455203794747, + "learning_rate": 1.1078676740526247e-06, + "loss": 0.4663, + "step": 14460 + }, + { + "epoch": 0.5967236114549807, + "grad_norm": 3.2634071644526705, + "learning_rate": 1.1076741731580329e-06, + "loss": 0.5464, + "step": 14461 + }, + { + "epoch": 0.5967648757943386, + "grad_norm": 5.240562383324536, + "learning_rate": 
1.1074806792713617e-06, + "loss": 0.5627, + "step": 14462 + }, + { + "epoch": 0.5968061401336965, + "grad_norm": 9.074086324694282, + "learning_rate": 1.1072871923960674e-06, + "loss": 0.5312, + "step": 14463 + }, + { + "epoch": 0.5968474044730544, + "grad_norm": 2.1902677060269093, + "learning_rate": 1.1070937125356065e-06, + "loss": 0.5423, + "step": 14464 + }, + { + "epoch": 0.5968886688124123, + "grad_norm": 2.0054658820659848, + "learning_rate": 1.1069002396934344e-06, + "loss": 0.5139, + "step": 14465 + }, + { + "epoch": 0.5969299331517702, + "grad_norm": 2.7077965432684388, + "learning_rate": 1.1067067738730079e-06, + "loss": 0.535, + "step": 14466 + }, + { + "epoch": 0.5969711974911281, + "grad_norm": 2.3872531316247985, + "learning_rate": 1.106513315077782e-06, + "loss": 0.5102, + "step": 14467 + }, + { + "epoch": 0.5970124618304861, + "grad_norm": 6.555521728774664, + "learning_rate": 1.1063198633112129e-06, + "loss": 0.5278, + "step": 14468 + }, + { + "epoch": 0.597053726169844, + "grad_norm": 2.1914012655419444, + "learning_rate": 1.1061264185767555e-06, + "loss": 0.4875, + "step": 14469 + }, + { + "epoch": 0.5970949905092019, + "grad_norm": 3.6317844324752255, + "learning_rate": 1.1059329808778659e-06, + "loss": 0.5263, + "step": 14470 + }, + { + "epoch": 0.5971362548485599, + "grad_norm": 10.96331568260641, + "learning_rate": 1.1057395502179987e-06, + "loss": 0.4683, + "step": 14471 + }, + { + "epoch": 0.5971775191879178, + "grad_norm": 2.3718020959292563, + "learning_rate": 1.10554612660061e-06, + "loss": 0.5139, + "step": 14472 + }, + { + "epoch": 0.5972187835272758, + "grad_norm": 3.4093681365885664, + "learning_rate": 1.1053527100291543e-06, + "loss": 0.4843, + "step": 14473 + }, + { + "epoch": 0.5972600478666337, + "grad_norm": 5.296870556546599, + "learning_rate": 1.105159300507086e-06, + "loss": 0.5916, + "step": 14474 + }, + { + "epoch": 0.5973013122059916, + "grad_norm": 6.004058791739786, + "learning_rate": 1.1049658980378604e-06, + "loss": 0.5282, + "step": 14475 + }, + { + "epoch": 0.5973425765453495, + "grad_norm": 6.570706794328079, + "learning_rate": 1.1047725026249324e-06, + "loss": 0.5168, + "step": 14476 + }, + { + "epoch": 0.5973838408847074, + "grad_norm": 1.8600493390670088, + "learning_rate": 1.1045791142717557e-06, + "loss": 0.5088, + "step": 14477 + }, + { + "epoch": 0.5974251052240653, + "grad_norm": 2.5803270799718723, + "learning_rate": 1.1043857329817854e-06, + "loss": 0.5038, + "step": 14478 + }, + { + "epoch": 0.5974663695634232, + "grad_norm": 1.8581094318599514, + "learning_rate": 1.1041923587584754e-06, + "loss": 0.5088, + "step": 14479 + }, + { + "epoch": 0.5975076339027812, + "grad_norm": 6.513060608612284, + "learning_rate": 1.1039989916052806e-06, + "loss": 0.4881, + "step": 14480 + }, + { + "epoch": 0.5975488982421392, + "grad_norm": 2.8934392517499883, + "learning_rate": 1.1038056315256537e-06, + "loss": 0.5221, + "step": 14481 + }, + { + "epoch": 0.5975901625814971, + "grad_norm": 2.4842443964378655, + "learning_rate": 1.10361227852305e-06, + "loss": 0.5035, + "step": 14482 + }, + { + "epoch": 0.597631426920855, + "grad_norm": 11.602083598519974, + "learning_rate": 1.1034189326009222e-06, + "loss": 0.5333, + "step": 14483 + }, + { + "epoch": 0.5976726912602129, + "grad_norm": 3.9766370292315227, + "learning_rate": 1.1032255937627246e-06, + "loss": 0.5395, + "step": 14484 + }, + { + "epoch": 0.5977139555995709, + "grad_norm": 2.440998646551132, + "learning_rate": 1.1030322620119103e-06, + "loss": 0.5591, + "step": 14485 + }, + { + 
"epoch": 0.5977552199389288, + "grad_norm": 4.631771573648468, + "learning_rate": 1.1028389373519335e-06, + "loss": 0.5529, + "step": 14486 + }, + { + "epoch": 0.5977964842782867, + "grad_norm": 6.261764655773664, + "learning_rate": 1.1026456197862463e-06, + "loss": 0.5133, + "step": 14487 + }, + { + "epoch": 0.5978377486176446, + "grad_norm": 4.644791915941564, + "learning_rate": 1.102452309318302e-06, + "loss": 0.4802, + "step": 14488 + }, + { + "epoch": 0.5978790129570025, + "grad_norm": 9.60300160283472, + "learning_rate": 1.1022590059515546e-06, + "loss": 0.5011, + "step": 14489 + }, + { + "epoch": 0.5979202772963604, + "grad_norm": 8.076798183475292, + "learning_rate": 1.1020657096894558e-06, + "loss": 0.4973, + "step": 14490 + }, + { + "epoch": 0.5979615416357185, + "grad_norm": 6.657022507180957, + "learning_rate": 1.1018724205354594e-06, + "loss": 0.5368, + "step": 14491 + }, + { + "epoch": 0.5980028059750764, + "grad_norm": 2.2085543280779585, + "learning_rate": 1.1016791384930171e-06, + "loss": 0.4993, + "step": 14492 + }, + { + "epoch": 0.5980440703144343, + "grad_norm": 3.6074124349842327, + "learning_rate": 1.1014858635655821e-06, + "loss": 0.5531, + "step": 14493 + }, + { + "epoch": 0.5980853346537922, + "grad_norm": 11.451738863693386, + "learning_rate": 1.1012925957566063e-06, + "loss": 0.4691, + "step": 14494 + }, + { + "epoch": 0.5981265989931501, + "grad_norm": 7.0406207699364955, + "learning_rate": 1.1010993350695422e-06, + "loss": 0.5021, + "step": 14495 + }, + { + "epoch": 0.598167863332508, + "grad_norm": 4.993314095371433, + "learning_rate": 1.1009060815078423e-06, + "loss": 0.5374, + "step": 14496 + }, + { + "epoch": 0.598209127671866, + "grad_norm": 4.97459910210398, + "learning_rate": 1.1007128350749575e-06, + "loss": 0.4909, + "step": 14497 + }, + { + "epoch": 0.5982503920112239, + "grad_norm": 4.022896201818009, + "learning_rate": 1.100519595774341e-06, + "loss": 0.5945, + "step": 14498 + }, + { + "epoch": 0.5982916563505818, + "grad_norm": 5.026577577991844, + "learning_rate": 1.100326363609444e-06, + "loss": 0.4766, + "step": 14499 + }, + { + "epoch": 0.5983329206899397, + "grad_norm": 4.390967948653813, + "learning_rate": 1.1001331385837173e-06, + "loss": 0.6102, + "step": 14500 + }, + { + "epoch": 0.5983741850292977, + "grad_norm": 2.728773777581239, + "learning_rate": 1.0999399207006134e-06, + "loss": 0.4957, + "step": 14501 + }, + { + "epoch": 0.5984154493686556, + "grad_norm": 5.682857294643385, + "learning_rate": 1.099746709963583e-06, + "loss": 0.5504, + "step": 14502 + }, + { + "epoch": 0.5984567137080136, + "grad_norm": 1.795637208608634, + "learning_rate": 1.0995535063760777e-06, + "loss": 0.5023, + "step": 14503 + }, + { + "epoch": 0.5984979780473715, + "grad_norm": 2.917450426834062, + "learning_rate": 1.0993603099415486e-06, + "loss": 0.5172, + "step": 14504 + }, + { + "epoch": 0.5985392423867294, + "grad_norm": 5.35661966537841, + "learning_rate": 1.0991671206634468e-06, + "loss": 0.485, + "step": 14505 + }, + { + "epoch": 0.5985805067260873, + "grad_norm": 2.129139891936461, + "learning_rate": 1.0989739385452225e-06, + "loss": 0.5069, + "step": 14506 + }, + { + "epoch": 0.5986217710654452, + "grad_norm": 2.2961763417501393, + "learning_rate": 1.0987807635903275e-06, + "loss": 0.5222, + "step": 14507 + }, + { + "epoch": 0.5986630354048031, + "grad_norm": 2.276580137392719, + "learning_rate": 1.098587595802211e-06, + "loss": 0.5476, + "step": 14508 + }, + { + "epoch": 0.5987042997441611, + "grad_norm": 3.134438724159, + "learning_rate": 
1.098394435184325e-06, + "loss": 0.5458, + "step": 14509 + }, + { + "epoch": 0.598745564083519, + "grad_norm": 3.085348887976087, + "learning_rate": 1.0982012817401183e-06, + "loss": 0.5893, + "step": 14510 + }, + { + "epoch": 0.598786828422877, + "grad_norm": 4.944446177572277, + "learning_rate": 1.0980081354730426e-06, + "loss": 0.5316, + "step": 14511 + }, + { + "epoch": 0.5988280927622349, + "grad_norm": 4.571486746821428, + "learning_rate": 1.0978149963865465e-06, + "loss": 0.4948, + "step": 14512 + }, + { + "epoch": 0.5988693571015928, + "grad_norm": 2.60301536837959, + "learning_rate": 1.0976218644840816e-06, + "loss": 0.516, + "step": 14513 + }, + { + "epoch": 0.5989106214409508, + "grad_norm": 4.786140313858065, + "learning_rate": 1.0974287397690964e-06, + "loss": 0.4804, + "step": 14514 + }, + { + "epoch": 0.5989518857803087, + "grad_norm": 2.1518121135105885, + "learning_rate": 1.097235622245041e-06, + "loss": 0.4951, + "step": 14515 + }, + { + "epoch": 0.5989931501196666, + "grad_norm": 1.86260676211712, + "learning_rate": 1.0970425119153648e-06, + "loss": 0.4636, + "step": 14516 + }, + { + "epoch": 0.5990344144590245, + "grad_norm": 2.231991200357405, + "learning_rate": 1.0968494087835174e-06, + "loss": 0.4856, + "step": 14517 + }, + { + "epoch": 0.5990756787983824, + "grad_norm": 2.0352106770219636, + "learning_rate": 1.096656312852948e-06, + "loss": 0.4972, + "step": 14518 + }, + { + "epoch": 0.5991169431377403, + "grad_norm": 1.8113680821128881, + "learning_rate": 1.0964632241271058e-06, + "loss": 0.563, + "step": 14519 + }, + { + "epoch": 0.5991582074770982, + "grad_norm": 3.370405548980295, + "learning_rate": 1.0962701426094399e-06, + "loss": 0.5076, + "step": 14520 + }, + { + "epoch": 0.5991994718164562, + "grad_norm": 2.8812900604630447, + "learning_rate": 1.0960770683033996e-06, + "loss": 0.523, + "step": 14521 + }, + { + "epoch": 0.5992407361558142, + "grad_norm": 2.4179572266583857, + "learning_rate": 1.0958840012124327e-06, + "loss": 0.4809, + "step": 14522 + }, + { + "epoch": 0.5992820004951721, + "grad_norm": 3.2548653381812596, + "learning_rate": 1.095690941339989e-06, + "loss": 0.5193, + "step": 14523 + }, + { + "epoch": 0.59932326483453, + "grad_norm": 3.1950384089217567, + "learning_rate": 1.0954978886895165e-06, + "loss": 0.5721, + "step": 14524 + }, + { + "epoch": 0.5993645291738879, + "grad_norm": 2.250622870915403, + "learning_rate": 1.0953048432644634e-06, + "loss": 0.524, + "step": 14525 + }, + { + "epoch": 0.5994057935132459, + "grad_norm": 2.5699115504140106, + "learning_rate": 1.095111805068279e-06, + "loss": 0.5306, + "step": 14526 + }, + { + "epoch": 0.5994470578526038, + "grad_norm": 2.187713538301694, + "learning_rate": 1.0949187741044097e-06, + "loss": 0.4921, + "step": 14527 + }, + { + "epoch": 0.5994883221919617, + "grad_norm": 7.782357266094778, + "learning_rate": 1.0947257503763049e-06, + "loss": 0.4572, + "step": 14528 + }, + { + "epoch": 0.5995295865313196, + "grad_norm": 4.344943527014411, + "learning_rate": 1.0945327338874115e-06, + "loss": 0.5087, + "step": 14529 + }, + { + "epoch": 0.5995708508706775, + "grad_norm": 3.4591049772887907, + "learning_rate": 1.0943397246411783e-06, + "loss": 0.5526, + "step": 14530 + }, + { + "epoch": 0.5996121152100354, + "grad_norm": 3.6148339488265386, + "learning_rate": 1.0941467226410522e-06, + "loss": 0.5399, + "step": 14531 + }, + { + "epoch": 0.5996533795493935, + "grad_norm": 3.684174928808336, + "learning_rate": 1.093953727890481e-06, + "loss": 0.5044, + "step": 14532 + }, + { + "epoch": 
0.5996946438887514, + "grad_norm": 3.0863650572188965, + "learning_rate": 1.0937607403929118e-06, + "loss": 0.5051, + "step": 14533 + }, + { + "epoch": 0.5997359082281093, + "grad_norm": 3.0336418237163847, + "learning_rate": 1.0935677601517925e-06, + "loss": 0.4741, + "step": 14534 + }, + { + "epoch": 0.5997771725674672, + "grad_norm": 2.7124057647543585, + "learning_rate": 1.093374787170569e-06, + "loss": 0.5099, + "step": 14535 + }, + { + "epoch": 0.5998184369068251, + "grad_norm": 2.265302030745755, + "learning_rate": 1.0931818214526898e-06, + "loss": 0.5758, + "step": 14536 + }, + { + "epoch": 0.599859701246183, + "grad_norm": 3.827739511986834, + "learning_rate": 1.0929888630016008e-06, + "loss": 0.5743, + "step": 14537 + }, + { + "epoch": 0.599900965585541, + "grad_norm": 4.246774071327671, + "learning_rate": 1.0927959118207485e-06, + "loss": 0.5247, + "step": 14538 + }, + { + "epoch": 0.5999422299248989, + "grad_norm": 2.017724910378103, + "learning_rate": 1.0926029679135805e-06, + "loss": 0.5097, + "step": 14539 + }, + { + "epoch": 0.5999834942642568, + "grad_norm": 3.0739209416375446, + "learning_rate": 1.0924100312835424e-06, + "loss": 0.5278, + "step": 14540 + }, + { + "epoch": 0.6000247586036147, + "grad_norm": 5.360655147749356, + "learning_rate": 1.0922171019340804e-06, + "loss": 0.5116, + "step": 14541 + }, + { + "epoch": 0.6000660229429727, + "grad_norm": 7.839028973609092, + "learning_rate": 1.0920241798686414e-06, + "loss": 0.5187, + "step": 14542 + }, + { + "epoch": 0.6001072872823306, + "grad_norm": 4.011684909203281, + "learning_rate": 1.0918312650906707e-06, + "loss": 0.5181, + "step": 14543 + }, + { + "epoch": 0.6001485516216886, + "grad_norm": 1.7343332760315997, + "learning_rate": 1.0916383576036153e-06, + "loss": 0.5237, + "step": 14544 + }, + { + "epoch": 0.6001898159610465, + "grad_norm": 2.1508900465287732, + "learning_rate": 1.09144545741092e-06, + "loss": 0.4874, + "step": 14545 + }, + { + "epoch": 0.6002310803004044, + "grad_norm": 5.206165697241783, + "learning_rate": 1.0912525645160312e-06, + "loss": 0.5338, + "step": 14546 + }, + { + "epoch": 0.6002723446397623, + "grad_norm": 3.9269198867477404, + "learning_rate": 1.0910596789223938e-06, + "loss": 0.4981, + "step": 14547 + }, + { + "epoch": 0.6003136089791202, + "grad_norm": 4.656329697573994, + "learning_rate": 1.0908668006334537e-06, + "loss": 0.4857, + "step": 14548 + }, + { + "epoch": 0.6003548733184781, + "grad_norm": 2.945061075400027, + "learning_rate": 1.0906739296526558e-06, + "loss": 0.522, + "step": 14549 + }, + { + "epoch": 0.6003961376578361, + "grad_norm": 2.8653404425767657, + "learning_rate": 1.090481065983446e-06, + "loss": 0.5257, + "step": 14550 + }, + { + "epoch": 0.600437401997194, + "grad_norm": 3.8251190039251184, + "learning_rate": 1.0902882096292683e-06, + "loss": 0.5996, + "step": 14551 + }, + { + "epoch": 0.600478666336552, + "grad_norm": 4.374550280336372, + "learning_rate": 1.0900953605935688e-06, + "loss": 0.5582, + "step": 14552 + }, + { + "epoch": 0.6005199306759099, + "grad_norm": 2.830883252462565, + "learning_rate": 1.0899025188797913e-06, + "loss": 0.5326, + "step": 14553 + }, + { + "epoch": 0.6005611950152678, + "grad_norm": 2.009313187914556, + "learning_rate": 1.0897096844913806e-06, + "loss": 0.5094, + "step": 14554 + }, + { + "epoch": 0.6006024593546258, + "grad_norm": 2.8096295700682163, + "learning_rate": 1.0895168574317811e-06, + "loss": 0.5278, + "step": 14555 + }, + { + "epoch": 0.6006437236939837, + "grad_norm": 4.660239882460976, + "learning_rate": 
1.0893240377044379e-06, + "loss": 0.5183, + "step": 14556 + }, + { + "epoch": 0.6006849880333416, + "grad_norm": 8.886076216043202, + "learning_rate": 1.089131225312794e-06, + "loss": 0.5168, + "step": 14557 + }, + { + "epoch": 0.6007262523726995, + "grad_norm": 7.899614789826033, + "learning_rate": 1.0889384202602948e-06, + "loss": 0.4673, + "step": 14558 + }, + { + "epoch": 0.6007675167120574, + "grad_norm": 2.6018969207866456, + "learning_rate": 1.0887456225503835e-06, + "loss": 0.5103, + "step": 14559 + }, + { + "epoch": 0.6008087810514153, + "grad_norm": 2.1115658692790564, + "learning_rate": 1.0885528321865044e-06, + "loss": 0.5398, + "step": 14560 + }, + { + "epoch": 0.6008500453907732, + "grad_norm": 2.513289594273007, + "learning_rate": 1.0883600491721008e-06, + "loss": 0.5165, + "step": 14561 + }, + { + "epoch": 0.6008913097301313, + "grad_norm": 3.5795249859402216, + "learning_rate": 1.0881672735106166e-06, + "loss": 0.5321, + "step": 14562 + }, + { + "epoch": 0.6009325740694892, + "grad_norm": 4.94070113021045, + "learning_rate": 1.0879745052054948e-06, + "loss": 0.4926, + "step": 14563 + }, + { + "epoch": 0.6009738384088471, + "grad_norm": 1.832170194605762, + "learning_rate": 1.0877817442601798e-06, + "loss": 0.5031, + "step": 14564 + }, + { + "epoch": 0.601015102748205, + "grad_norm": 15.16434375267681, + "learning_rate": 1.087588990678114e-06, + "loss": 0.5532, + "step": 14565 + }, + { + "epoch": 0.6010563670875629, + "grad_norm": 6.44374930343111, + "learning_rate": 1.08739624446274e-06, + "loss": 0.521, + "step": 14566 + }, + { + "epoch": 0.6010976314269209, + "grad_norm": 5.74550268606892, + "learning_rate": 1.0872035056175015e-06, + "loss": 0.51, + "step": 14567 + }, + { + "epoch": 0.6011388957662788, + "grad_norm": 2.7678844535215537, + "learning_rate": 1.087010774145841e-06, + "loss": 0.5483, + "step": 14568 + }, + { + "epoch": 0.6011801601056367, + "grad_norm": 4.345027176373578, + "learning_rate": 1.0868180500512012e-06, + "loss": 0.4984, + "step": 14569 + }, + { + "epoch": 0.6012214244449946, + "grad_norm": 2.18210896412912, + "learning_rate": 1.0866253333370245e-06, + "loss": 0.4543, + "step": 14570 + }, + { + "epoch": 0.6012626887843525, + "grad_norm": 3.75597643549733, + "learning_rate": 1.0864326240067537e-06, + "loss": 0.537, + "step": 14571 + }, + { + "epoch": 0.6013039531237105, + "grad_norm": 4.485319358006865, + "learning_rate": 1.0862399220638303e-06, + "loss": 0.5206, + "step": 14572 + }, + { + "epoch": 0.6013452174630685, + "grad_norm": 2.628155193095616, + "learning_rate": 1.0860472275116976e-06, + "loss": 0.4945, + "step": 14573 + }, + { + "epoch": 0.6013864818024264, + "grad_norm": 2.652538659883051, + "learning_rate": 1.0858545403537967e-06, + "loss": 0.554, + "step": 14574 + }, + { + "epoch": 0.6014277461417843, + "grad_norm": 1.8055711167715012, + "learning_rate": 1.0856618605935697e-06, + "loss": 0.5063, + "step": 14575 + }, + { + "epoch": 0.6014690104811422, + "grad_norm": 3.5183588162278085, + "learning_rate": 1.0854691882344588e-06, + "loss": 0.5368, + "step": 14576 + }, + { + "epoch": 0.6015102748205001, + "grad_norm": 3.34170185206432, + "learning_rate": 1.0852765232799047e-06, + "loss": 0.484, + "step": 14577 + }, + { + "epoch": 0.601551539159858, + "grad_norm": 5.153870253523235, + "learning_rate": 1.08508386573335e-06, + "loss": 0.5351, + "step": 14578 + }, + { + "epoch": 0.601592803499216, + "grad_norm": 4.092469098696768, + "learning_rate": 1.084891215598235e-06, + "loss": 0.5587, + "step": 14579 + }, + { + "epoch": 0.6016340678385739, 
+ "grad_norm": 3.2762640313945615, + "learning_rate": 1.084698572878001e-06, + "loss": 0.5077, + "step": 14580 + }, + { + "epoch": 0.6016753321779318, + "grad_norm": 3.9306684224519977, + "learning_rate": 1.08450593757609e-06, + "loss": 0.5559, + "step": 14581 + }, + { + "epoch": 0.6017165965172897, + "grad_norm": 3.7275151253582655, + "learning_rate": 1.0843133096959418e-06, + "loss": 0.5336, + "step": 14582 + }, + { + "epoch": 0.6017578608566477, + "grad_norm": 2.512098724109535, + "learning_rate": 1.0841206892409981e-06, + "loss": 0.5258, + "step": 14583 + }, + { + "epoch": 0.6017991251960056, + "grad_norm": 3.0422251945132035, + "learning_rate": 1.083928076214699e-06, + "loss": 0.5262, + "step": 14584 + }, + { + "epoch": 0.6018403895353636, + "grad_norm": 2.9174172740261226, + "learning_rate": 1.0837354706204857e-06, + "loss": 0.4861, + "step": 14585 + }, + { + "epoch": 0.6018816538747215, + "grad_norm": 2.877773805242621, + "learning_rate": 1.0835428724617979e-06, + "loss": 0.5459, + "step": 14586 + }, + { + "epoch": 0.6019229182140794, + "grad_norm": 3.906886400358996, + "learning_rate": 1.0833502817420763e-06, + "loss": 0.4683, + "step": 14587 + }, + { + "epoch": 0.6019641825534373, + "grad_norm": 2.734839099174869, + "learning_rate": 1.083157698464761e-06, + "loss": 0.516, + "step": 14588 + }, + { + "epoch": 0.6020054468927952, + "grad_norm": 1.9016255151563604, + "learning_rate": 1.082965122633292e-06, + "loss": 0.4831, + "step": 14589 + }, + { + "epoch": 0.6020467112321531, + "grad_norm": 2.802863924414923, + "learning_rate": 1.082772554251109e-06, + "loss": 0.4756, + "step": 14590 + }, + { + "epoch": 0.6020879755715111, + "grad_norm": 3.7848366892442833, + "learning_rate": 1.0825799933216523e-06, + "loss": 0.5018, + "step": 14591 + }, + { + "epoch": 0.602129239910869, + "grad_norm": 3.409505019121761, + "learning_rate": 1.0823874398483614e-06, + "loss": 0.4988, + "step": 14592 + }, + { + "epoch": 0.602170504250227, + "grad_norm": 9.777631673235337, + "learning_rate": 1.0821948938346752e-06, + "loss": 0.5381, + "step": 14593 + }, + { + "epoch": 0.6022117685895849, + "grad_norm": 4.162118569994918, + "learning_rate": 1.0820023552840332e-06, + "loss": 0.5052, + "step": 14594 + }, + { + "epoch": 0.6022530329289428, + "grad_norm": 3.5380044702436413, + "learning_rate": 1.0818098241998748e-06, + "loss": 0.5359, + "step": 14595 + }, + { + "epoch": 0.6022942972683007, + "grad_norm": 3.047420916783555, + "learning_rate": 1.0816173005856388e-06, + "loss": 0.517, + "step": 14596 + }, + { + "epoch": 0.6023355616076587, + "grad_norm": 2.6823147504143052, + "learning_rate": 1.0814247844447652e-06, + "loss": 0.5117, + "step": 14597 + }, + { + "epoch": 0.6023768259470166, + "grad_norm": 1.8813343642680154, + "learning_rate": 1.0812322757806914e-06, + "loss": 0.5135, + "step": 14598 + }, + { + "epoch": 0.6024180902863745, + "grad_norm": 4.44157409810854, + "learning_rate": 1.081039774596857e-06, + "loss": 0.505, + "step": 14599 + }, + { + "epoch": 0.6024593546257324, + "grad_norm": 6.268094234759881, + "learning_rate": 1.0808472808967003e-06, + "loss": 0.4967, + "step": 14600 + }, + { + "epoch": 0.6025006189650903, + "grad_norm": 2.0898404564235684, + "learning_rate": 1.0806547946836598e-06, + "loss": 0.5516, + "step": 14601 + }, + { + "epoch": 0.6025418833044482, + "grad_norm": 9.913830189352593, + "learning_rate": 1.0804623159611735e-06, + "loss": 0.5135, + "step": 14602 + }, + { + "epoch": 0.6025831476438063, + "grad_norm": 4.308083683464335, + "learning_rate": 1.08026984473268e-06, + "loss": 
0.5561, + "step": 14603 + }, + { + "epoch": 0.6026244119831642, + "grad_norm": 1.9390745453584992, + "learning_rate": 1.0800773810016168e-06, + "loss": 0.5584, + "step": 14604 + }, + { + "epoch": 0.6026656763225221, + "grad_norm": 3.5165201017722194, + "learning_rate": 1.079884924771423e-06, + "loss": 0.4999, + "step": 14605 + }, + { + "epoch": 0.60270694066188, + "grad_norm": 2.519869679757577, + "learning_rate": 1.0796924760455346e-06, + "loss": 0.5145, + "step": 14606 + }, + { + "epoch": 0.6027482050012379, + "grad_norm": 2.090633211902034, + "learning_rate": 1.07950003482739e-06, + "loss": 0.517, + "step": 14607 + }, + { + "epoch": 0.6027894693405959, + "grad_norm": 2.450287339261695, + "learning_rate": 1.0793076011204268e-06, + "loss": 0.4613, + "step": 14608 + }, + { + "epoch": 0.6028307336799538, + "grad_norm": 3.8479540991161163, + "learning_rate": 1.079115174928082e-06, + "loss": 0.573, + "step": 14609 + }, + { + "epoch": 0.6028719980193117, + "grad_norm": 2.8186922979258733, + "learning_rate": 1.0789227562537934e-06, + "loss": 0.4849, + "step": 14610 + }, + { + "epoch": 0.6029132623586696, + "grad_norm": 2.8153438293938517, + "learning_rate": 1.0787303451009975e-06, + "loss": 0.518, + "step": 14611 + }, + { + "epoch": 0.6029545266980275, + "grad_norm": 3.0708454517492187, + "learning_rate": 1.0785379414731319e-06, + "loss": 0.5006, + "step": 14612 + }, + { + "epoch": 0.6029957910373855, + "grad_norm": 1.8606085546537785, + "learning_rate": 1.0783455453736324e-06, + "loss": 0.4941, + "step": 14613 + }, + { + "epoch": 0.6030370553767435, + "grad_norm": 2.002476012515117, + "learning_rate": 1.0781531568059368e-06, + "loss": 0.5155, + "step": 14614 + }, + { + "epoch": 0.6030783197161014, + "grad_norm": 1.9019931603395706, + "learning_rate": 1.077960775773481e-06, + "loss": 0.5825, + "step": 14615 + }, + { + "epoch": 0.6031195840554593, + "grad_norm": 2.402352464892786, + "learning_rate": 1.0777684022797013e-06, + "loss": 0.5741, + "step": 14616 + }, + { + "epoch": 0.6031608483948172, + "grad_norm": 3.7648048053649084, + "learning_rate": 1.0775760363280346e-06, + "loss": 0.5181, + "step": 14617 + }, + { + "epoch": 0.6032021127341751, + "grad_norm": 3.3230936246584277, + "learning_rate": 1.0773836779219168e-06, + "loss": 0.573, + "step": 14618 + }, + { + "epoch": 0.603243377073533, + "grad_norm": 3.4305248317918884, + "learning_rate": 1.0771913270647832e-06, + "loss": 0.5514, + "step": 14619 + }, + { + "epoch": 0.603284641412891, + "grad_norm": 2.3345245757289685, + "learning_rate": 1.0769989837600704e-06, + "loss": 0.4528, + "step": 14620 + }, + { + "epoch": 0.6033259057522489, + "grad_norm": 2.9059623088519118, + "learning_rate": 1.0768066480112139e-06, + "loss": 0.4659, + "step": 14621 + }, + { + "epoch": 0.6033671700916068, + "grad_norm": 2.2063459976266646, + "learning_rate": 1.0766143198216492e-06, + "loss": 0.5121, + "step": 14622 + }, + { + "epoch": 0.6034084344309648, + "grad_norm": 1.8024636612397416, + "learning_rate": 1.076421999194812e-06, + "loss": 0.446, + "step": 14623 + }, + { + "epoch": 0.6034496987703227, + "grad_norm": 4.892513486151939, + "learning_rate": 1.0762296861341377e-06, + "loss": 0.5128, + "step": 14624 + }, + { + "epoch": 0.6034909631096806, + "grad_norm": 4.65816077344268, + "learning_rate": 1.076037380643061e-06, + "loss": 0.4716, + "step": 14625 + }, + { + "epoch": 0.6035322274490386, + "grad_norm": 6.33312733848087, + "learning_rate": 1.0758450827250176e-06, + "loss": 0.4884, + "step": 14626 + }, + { + "epoch": 0.6035734917883965, + "grad_norm": 
2.7785052905205445, + "learning_rate": 1.0756527923834416e-06, + "loss": 0.5151, + "step": 14627 + }, + { + "epoch": 0.6036147561277544, + "grad_norm": 3.994535523550446, + "learning_rate": 1.0754605096217687e-06, + "loss": 0.484, + "step": 14628 + }, + { + "epoch": 0.6036560204671123, + "grad_norm": 2.7395709154238674, + "learning_rate": 1.075268234443433e-06, + "loss": 0.4646, + "step": 14629 + }, + { + "epoch": 0.6036972848064702, + "grad_norm": 1.9037944614378237, + "learning_rate": 1.0750759668518692e-06, + "loss": 0.5617, + "step": 14630 + }, + { + "epoch": 0.6037385491458281, + "grad_norm": 2.151693145597068, + "learning_rate": 1.0748837068505122e-06, + "loss": 0.5329, + "step": 14631 + }, + { + "epoch": 0.6037798134851861, + "grad_norm": 2.76599308283443, + "learning_rate": 1.0746914544427951e-06, + "loss": 0.531, + "step": 14632 + }, + { + "epoch": 0.6038210778245441, + "grad_norm": 5.293368084887232, + "learning_rate": 1.0744992096321524e-06, + "loss": 0.5814, + "step": 14633 + }, + { + "epoch": 0.603862342163902, + "grad_norm": 1.9126338702044632, + "learning_rate": 1.0743069724220186e-06, + "loss": 0.529, + "step": 14634 + }, + { + "epoch": 0.6039036065032599, + "grad_norm": 2.553604992981972, + "learning_rate": 1.0741147428158267e-06, + "loss": 0.5691, + "step": 14635 + }, + { + "epoch": 0.6039448708426178, + "grad_norm": 3.3965481296988576, + "learning_rate": 1.0739225208170115e-06, + "loss": 0.4658, + "step": 14636 + }, + { + "epoch": 0.6039861351819757, + "grad_norm": 5.005510695190604, + "learning_rate": 1.0737303064290055e-06, + "loss": 0.5372, + "step": 14637 + }, + { + "epoch": 0.6040273995213337, + "grad_norm": 2.389160510679941, + "learning_rate": 1.0735380996552427e-06, + "loss": 0.5427, + "step": 14638 + }, + { + "epoch": 0.6040686638606916, + "grad_norm": 3.047703684668124, + "learning_rate": 1.0733459004991563e-06, + "loss": 0.4829, + "step": 14639 + }, + { + "epoch": 0.6041099282000495, + "grad_norm": 4.301455590961907, + "learning_rate": 1.0731537089641795e-06, + "loss": 0.547, + "step": 14640 + }, + { + "epoch": 0.6041511925394074, + "grad_norm": 2.36199728857551, + "learning_rate": 1.0729615250537451e-06, + "loss": 0.5311, + "step": 14641 + }, + { + "epoch": 0.6041924568787653, + "grad_norm": 2.756904947623602, + "learning_rate": 1.0727693487712867e-06, + "loss": 0.5289, + "step": 14642 + }, + { + "epoch": 0.6042337212181232, + "grad_norm": 6.0166904987502035, + "learning_rate": 1.0725771801202358e-06, + "loss": 0.5578, + "step": 14643 + }, + { + "epoch": 0.6042749855574813, + "grad_norm": 2.21043128513017, + "learning_rate": 1.0723850191040265e-06, + "loss": 0.531, + "step": 14644 + }, + { + "epoch": 0.6043162498968392, + "grad_norm": 4.6869971978204426, + "learning_rate": 1.0721928657260901e-06, + "loss": 0.5911, + "step": 14645 + }, + { + "epoch": 0.6043575142361971, + "grad_norm": 2.359842855016355, + "learning_rate": 1.072000719989859e-06, + "loss": 0.5456, + "step": 14646 + }, + { + "epoch": 0.604398778575555, + "grad_norm": 2.9872390396638133, + "learning_rate": 1.0718085818987663e-06, + "loss": 0.445, + "step": 14647 + }, + { + "epoch": 0.6044400429149129, + "grad_norm": 2.4570032869736105, + "learning_rate": 1.071616451456243e-06, + "loss": 0.4881, + "step": 14648 + }, + { + "epoch": 0.6044813072542708, + "grad_norm": 4.927676606387367, + "learning_rate": 1.0714243286657215e-06, + "loss": 0.5033, + "step": 14649 + }, + { + "epoch": 0.6045225715936288, + "grad_norm": 2.591160340945626, + "learning_rate": 1.0712322135306338e-06, + "loss": 0.4981, + 
"step": 14650 + }, + { + "epoch": 0.6045638359329867, + "grad_norm": 4.807388198368671, + "learning_rate": 1.0710401060544115e-06, + "loss": 0.5473, + "step": 14651 + }, + { + "epoch": 0.6046051002723446, + "grad_norm": 4.411646573867722, + "learning_rate": 1.0708480062404856e-06, + "loss": 0.5339, + "step": 14652 + }, + { + "epoch": 0.6046463646117025, + "grad_norm": 2.85942373355361, + "learning_rate": 1.0706559140922884e-06, + "loss": 0.5145, + "step": 14653 + }, + { + "epoch": 0.6046876289510605, + "grad_norm": 3.0273790185418115, + "learning_rate": 1.0704638296132507e-06, + "loss": 0.5221, + "step": 14654 + }, + { + "epoch": 0.6047288932904185, + "grad_norm": 3.732649027792273, + "learning_rate": 1.0702717528068029e-06, + "loss": 0.5555, + "step": 14655 + }, + { + "epoch": 0.6047701576297764, + "grad_norm": 3.1804473559261943, + "learning_rate": 1.070079683676377e-06, + "loss": 0.5657, + "step": 14656 + }, + { + "epoch": 0.6048114219691343, + "grad_norm": 2.6832746070876854, + "learning_rate": 1.0698876222254038e-06, + "loss": 0.4815, + "step": 14657 + }, + { + "epoch": 0.6048526863084922, + "grad_norm": 2.7886580636963476, + "learning_rate": 1.0696955684573132e-06, + "loss": 0.5185, + "step": 14658 + }, + { + "epoch": 0.6048939506478501, + "grad_norm": 3.2721874897509116, + "learning_rate": 1.0695035223755365e-06, + "loss": 0.5144, + "step": 14659 + }, + { + "epoch": 0.604935214987208, + "grad_norm": 2.3693195220658647, + "learning_rate": 1.0693114839835034e-06, + "loss": 0.5219, + "step": 14660 + }, + { + "epoch": 0.604976479326566, + "grad_norm": 2.1495109993873567, + "learning_rate": 1.0691194532846446e-06, + "loss": 0.5033, + "step": 14661 + }, + { + "epoch": 0.6050177436659239, + "grad_norm": 2.4349917874930127, + "learning_rate": 1.0689274302823904e-06, + "loss": 0.5327, + "step": 14662 + }, + { + "epoch": 0.6050590080052818, + "grad_norm": 2.493333229083261, + "learning_rate": 1.0687354149801708e-06, + "loss": 0.5029, + "step": 14663 + }, + { + "epoch": 0.6051002723446398, + "grad_norm": 4.848828757229997, + "learning_rate": 1.0685434073814154e-06, + "loss": 0.5338, + "step": 14664 + }, + { + "epoch": 0.6051415366839977, + "grad_norm": 3.5858427174146903, + "learning_rate": 1.0683514074895542e-06, + "loss": 0.5526, + "step": 14665 + }, + { + "epoch": 0.6051828010233556, + "grad_norm": 2.782979437182114, + "learning_rate": 1.0681594153080166e-06, + "loss": 0.4574, + "step": 14666 + }, + { + "epoch": 0.6052240653627136, + "grad_norm": 5.624589306410063, + "learning_rate": 1.0679674308402321e-06, + "loss": 0.497, + "step": 14667 + }, + { + "epoch": 0.6052653297020715, + "grad_norm": 7.599018058758075, + "learning_rate": 1.0677754540896301e-06, + "loss": 0.5628, + "step": 14668 + }, + { + "epoch": 0.6053065940414294, + "grad_norm": 2.1520803678180394, + "learning_rate": 1.0675834850596397e-06, + "loss": 0.557, + "step": 14669 + }, + { + "epoch": 0.6053478583807873, + "grad_norm": 3.878831253332719, + "learning_rate": 1.0673915237536908e-06, + "loss": 0.511, + "step": 14670 + }, + { + "epoch": 0.6053891227201452, + "grad_norm": 1.9909165309890373, + "learning_rate": 1.0671995701752107e-06, + "loss": 0.5202, + "step": 14671 + }, + { + "epoch": 0.6054303870595031, + "grad_norm": 2.941882383666015, + "learning_rate": 1.067007624327629e-06, + "loss": 0.4639, + "step": 14672 + }, + { + "epoch": 0.605471651398861, + "grad_norm": 5.279021173073558, + "learning_rate": 1.0668156862143746e-06, + "loss": 0.5198, + "step": 14673 + }, + { + "epoch": 0.6055129157382191, + "grad_norm": 
3.859608625239047, + "learning_rate": 1.0666237558388751e-06, + "loss": 0.5065, + "step": 14674 + }, + { + "epoch": 0.605554180077577, + "grad_norm": 3.8799307280036963, + "learning_rate": 1.0664318332045602e-06, + "loss": 0.5484, + "step": 14675 + }, + { + "epoch": 0.6055954444169349, + "grad_norm": 2.9162345104527754, + "learning_rate": 1.0662399183148568e-06, + "loss": 0.4989, + "step": 14676 + }, + { + "epoch": 0.6056367087562928, + "grad_norm": 4.377689957138003, + "learning_rate": 1.066048011173194e-06, + "loss": 0.5203, + "step": 14677 + }, + { + "epoch": 0.6056779730956507, + "grad_norm": 3.572244263597959, + "learning_rate": 1.065856111782999e-06, + "loss": 0.4632, + "step": 14678 + }, + { + "epoch": 0.6057192374350087, + "grad_norm": 19.25117603568921, + "learning_rate": 1.0656642201477e-06, + "loss": 0.4527, + "step": 14679 + }, + { + "epoch": 0.6057605017743666, + "grad_norm": 2.420816392069556, + "learning_rate": 1.0654723362707243e-06, + "loss": 0.5419, + "step": 14680 + }, + { + "epoch": 0.6058017661137245, + "grad_norm": 2.8401042052183594, + "learning_rate": 1.0652804601555003e-06, + "loss": 0.4942, + "step": 14681 + }, + { + "epoch": 0.6058430304530824, + "grad_norm": 2.7026201629629085, + "learning_rate": 1.065088591805454e-06, + "loss": 0.5188, + "step": 14682 + }, + { + "epoch": 0.6058842947924403, + "grad_norm": 2.6268336259481413, + "learning_rate": 1.064896731224014e-06, + "loss": 0.5912, + "step": 14683 + }, + { + "epoch": 0.6059255591317984, + "grad_norm": 3.3420119101209877, + "learning_rate": 1.0647048784146072e-06, + "loss": 0.5003, + "step": 14684 + }, + { + "epoch": 0.6059668234711563, + "grad_norm": 3.130638128263558, + "learning_rate": 1.0645130333806597e-06, + "loss": 0.5161, + "step": 14685 + }, + { + "epoch": 0.6060080878105142, + "grad_norm": 2.8519432117968773, + "learning_rate": 1.0643211961255988e-06, + "loss": 0.5143, + "step": 14686 + }, + { + "epoch": 0.6060493521498721, + "grad_norm": 5.704576318643687, + "learning_rate": 1.0641293666528512e-06, + "loss": 0.4983, + "step": 14687 + }, + { + "epoch": 0.60609061648923, + "grad_norm": 2.1219346973654507, + "learning_rate": 1.0639375449658436e-06, + "loss": 0.5297, + "step": 14688 + }, + { + "epoch": 0.6061318808285879, + "grad_norm": 2.7224725396660308, + "learning_rate": 1.0637457310680022e-06, + "loss": 0.5476, + "step": 14689 + }, + { + "epoch": 0.6061731451679458, + "grad_norm": 8.73932440669229, + "learning_rate": 1.0635539249627538e-06, + "loss": 0.5156, + "step": 14690 + }, + { + "epoch": 0.6062144095073038, + "grad_norm": 4.239477071128447, + "learning_rate": 1.0633621266535235e-06, + "loss": 0.5634, + "step": 14691 + }, + { + "epoch": 0.6062556738466617, + "grad_norm": 2.659472344570681, + "learning_rate": 1.0631703361437385e-06, + "loss": 0.491, + "step": 14692 + }, + { + "epoch": 0.6062969381860196, + "grad_norm": 2.4325491722194115, + "learning_rate": 1.0629785534368238e-06, + "loss": 0.5637, + "step": 14693 + }, + { + "epoch": 0.6063382025253776, + "grad_norm": 5.435922800843315, + "learning_rate": 1.0627867785362054e-06, + "loss": 0.4827, + "step": 14694 + }, + { + "epoch": 0.6063794668647355, + "grad_norm": 5.961949334987409, + "learning_rate": 1.062595011445309e-06, + "loss": 0.5107, + "step": 14695 + }, + { + "epoch": 0.6064207312040935, + "grad_norm": 2.1296172972045713, + "learning_rate": 1.0624032521675596e-06, + "loss": 0.4871, + "step": 14696 + }, + { + "epoch": 0.6064619955434514, + "grad_norm": 3.4797499071012012, + "learning_rate": 1.0622115007063835e-06, + "loss": 0.5248, + 
"step": 14697 + }, + { + "epoch": 0.6065032598828093, + "grad_norm": 2.6395134815343737, + "learning_rate": 1.0620197570652048e-06, + "loss": 0.4917, + "step": 14698 + }, + { + "epoch": 0.6065445242221672, + "grad_norm": 2.122665287082489, + "learning_rate": 1.0618280212474487e-06, + "loss": 0.4746, + "step": 14699 + }, + { + "epoch": 0.6065857885615251, + "grad_norm": 3.061040802747961, + "learning_rate": 1.0616362932565404e-06, + "loss": 0.5337, + "step": 14700 + }, + { + "epoch": 0.606627052900883, + "grad_norm": 2.4012615182330204, + "learning_rate": 1.0614445730959042e-06, + "loss": 0.5624, + "step": 14701 + }, + { + "epoch": 0.606668317240241, + "grad_norm": 2.565528423459789, + "learning_rate": 1.0612528607689655e-06, + "loss": 0.5438, + "step": 14702 + }, + { + "epoch": 0.6067095815795989, + "grad_norm": 2.787365233249681, + "learning_rate": 1.061061156279148e-06, + "loss": 0.5043, + "step": 14703 + }, + { + "epoch": 0.6067508459189568, + "grad_norm": 3.974081374553892, + "learning_rate": 1.0608694596298765e-06, + "loss": 0.5858, + "step": 14704 + }, + { + "epoch": 0.6067921102583148, + "grad_norm": 2.42705060710741, + "learning_rate": 1.0606777708245748e-06, + "loss": 0.4455, + "step": 14705 + }, + { + "epoch": 0.6068333745976727, + "grad_norm": 2.5607425603085887, + "learning_rate": 1.0604860898666673e-06, + "loss": 0.5519, + "step": 14706 + }, + { + "epoch": 0.6068746389370306, + "grad_norm": 3.3805796037689686, + "learning_rate": 1.0602944167595775e-06, + "loss": 0.5538, + "step": 14707 + }, + { + "epoch": 0.6069159032763886, + "grad_norm": 4.093046534642952, + "learning_rate": 1.0601027515067296e-06, + "loss": 0.5479, + "step": 14708 + }, + { + "epoch": 0.6069571676157465, + "grad_norm": 7.882682183960493, + "learning_rate": 1.0599110941115468e-06, + "loss": 0.546, + "step": 14709 + }, + { + "epoch": 0.6069984319551044, + "grad_norm": 2.959476745786275, + "learning_rate": 1.0597194445774537e-06, + "loss": 0.5365, + "step": 14710 + }, + { + "epoch": 0.6070396962944623, + "grad_norm": 2.867282976562485, + "learning_rate": 1.059527802907872e-06, + "loss": 0.5631, + "step": 14711 + }, + { + "epoch": 0.6070809606338202, + "grad_norm": 2.2087211318880806, + "learning_rate": 1.0593361691062257e-06, + "loss": 0.4959, + "step": 14712 + }, + { + "epoch": 0.6071222249731781, + "grad_norm": 2.692925418270476, + "learning_rate": 1.0591445431759373e-06, + "loss": 0.5367, + "step": 14713 + }, + { + "epoch": 0.607163489312536, + "grad_norm": 3.9067533056410055, + "learning_rate": 1.0589529251204309e-06, + "loss": 0.5492, + "step": 14714 + }, + { + "epoch": 0.6072047536518941, + "grad_norm": 2.0240428571633284, + "learning_rate": 1.0587613149431282e-06, + "loss": 0.4913, + "step": 14715 + }, + { + "epoch": 0.607246017991252, + "grad_norm": 4.978302921190071, + "learning_rate": 1.0585697126474524e-06, + "loss": 0.4714, + "step": 14716 + }, + { + "epoch": 0.6072872823306099, + "grad_norm": 2.8280510581618215, + "learning_rate": 1.0583781182368257e-06, + "loss": 0.4795, + "step": 14717 + }, + { + "epoch": 0.6073285466699678, + "grad_norm": 6.6744939160747725, + "learning_rate": 1.0581865317146705e-06, + "loss": 0.4592, + "step": 14718 + }, + { + "epoch": 0.6073698110093257, + "grad_norm": 2.752059247251628, + "learning_rate": 1.057994953084409e-06, + "loss": 0.5587, + "step": 14719 + }, + { + "epoch": 0.6074110753486837, + "grad_norm": 37.60541993596092, + "learning_rate": 1.057803382349464e-06, + "loss": 0.5058, + "step": 14720 + }, + { + "epoch": 0.6074523396880416, + "grad_norm": 
2.8802917300592066, + "learning_rate": 1.0576118195132558e-06, + "loss": 0.5041, + "step": 14721 + }, + { + "epoch": 0.6074936040273995, + "grad_norm": 2.6565838954096908, + "learning_rate": 1.0574202645792082e-06, + "loss": 0.5481, + "step": 14722 + }, + { + "epoch": 0.6075348683667574, + "grad_norm": 4.837967409975619, + "learning_rate": 1.057228717550742e-06, + "loss": 0.4787, + "step": 14723 + }, + { + "epoch": 0.6075761327061153, + "grad_norm": 2.469194205445648, + "learning_rate": 1.0570371784312774e-06, + "loss": 0.5063, + "step": 14724 + }, + { + "epoch": 0.6076173970454734, + "grad_norm": 3.1716757202997394, + "learning_rate": 1.0568456472242377e-06, + "loss": 0.5118, + "step": 14725 + }, + { + "epoch": 0.6076586613848313, + "grad_norm": 2.673953011392567, + "learning_rate": 1.056654123933043e-06, + "loss": 0.5988, + "step": 14726 + }, + { + "epoch": 0.6076999257241892, + "grad_norm": 1.962519736180472, + "learning_rate": 1.0564626085611151e-06, + "loss": 0.5155, + "step": 14727 + }, + { + "epoch": 0.6077411900635471, + "grad_norm": 2.6786816024242044, + "learning_rate": 1.056271101111874e-06, + "loss": 0.5421, + "step": 14728 + }, + { + "epoch": 0.607782454402905, + "grad_norm": 3.879371091487709, + "learning_rate": 1.0560796015887414e-06, + "loss": 0.5259, + "step": 14729 + }, + { + "epoch": 0.6078237187422629, + "grad_norm": 3.024284454194094, + "learning_rate": 1.0558881099951376e-06, + "loss": 0.4992, + "step": 14730 + }, + { + "epoch": 0.6078649830816208, + "grad_norm": 4.038593060961708, + "learning_rate": 1.0556966263344833e-06, + "loss": 0.545, + "step": 14731 + }, + { + "epoch": 0.6079062474209788, + "grad_norm": 2.7163524540224353, + "learning_rate": 1.0555051506101987e-06, + "loss": 0.6027, + "step": 14732 + }, + { + "epoch": 0.6079475117603367, + "grad_norm": 2.5812041458419257, + "learning_rate": 1.0553136828257038e-06, + "loss": 0.5302, + "step": 14733 + }, + { + "epoch": 0.6079887760996946, + "grad_norm": 5.764798913626597, + "learning_rate": 1.0551222229844194e-06, + "loss": 0.4876, + "step": 14734 + }, + { + "epoch": 0.6080300404390526, + "grad_norm": 2.921856738149607, + "learning_rate": 1.0549307710897645e-06, + "loss": 0.6095, + "step": 14735 + }, + { + "epoch": 0.6080713047784105, + "grad_norm": 8.905004774622446, + "learning_rate": 1.0547393271451601e-06, + "loss": 0.5868, + "step": 14736 + }, + { + "epoch": 0.6081125691177685, + "grad_norm": 3.717840741451081, + "learning_rate": 1.054547891154025e-06, + "loss": 0.5608, + "step": 14737 + }, + { + "epoch": 0.6081538334571264, + "grad_norm": 4.179131204374861, + "learning_rate": 1.0543564631197785e-06, + "loss": 0.5139, + "step": 14738 + }, + { + "epoch": 0.6081950977964843, + "grad_norm": 3.9679675189993913, + "learning_rate": 1.0541650430458405e-06, + "loss": 0.4981, + "step": 14739 + }, + { + "epoch": 0.6082363621358422, + "grad_norm": 2.8757406058038497, + "learning_rate": 1.05397363093563e-06, + "loss": 0.5784, + "step": 14740 + }, + { + "epoch": 0.6082776264752001, + "grad_norm": 2.357456361382822, + "learning_rate": 1.0537822267925667e-06, + "loss": 0.4911, + "step": 14741 + }, + { + "epoch": 0.608318890814558, + "grad_norm": 4.335035569828182, + "learning_rate": 1.0535908306200685e-06, + "loss": 0.5632, + "step": 14742 + }, + { + "epoch": 0.608360155153916, + "grad_norm": 1.89675722051433, + "learning_rate": 1.0533994424215554e-06, + "loss": 0.5485, + "step": 14743 + }, + { + "epoch": 0.6084014194932739, + "grad_norm": 60.0079291285691, + "learning_rate": 1.0532080622004452e-06, + "loss": 0.4962, + 
"step": 14744 + }, + { + "epoch": 0.6084426838326319, + "grad_norm": 16.037246755664608, + "learning_rate": 1.053016689960157e-06, + "loss": 0.5184, + "step": 14745 + }, + { + "epoch": 0.6084839481719898, + "grad_norm": 6.334574096803098, + "learning_rate": 1.0528253257041082e-06, + "loss": 0.5523, + "step": 14746 + }, + { + "epoch": 0.6085252125113477, + "grad_norm": 1.9648001993233246, + "learning_rate": 1.0526339694357184e-06, + "loss": 0.5319, + "step": 14747 + }, + { + "epoch": 0.6085664768507056, + "grad_norm": 3.0731675717878946, + "learning_rate": 1.0524426211584047e-06, + "loss": 0.5444, + "step": 14748 + }, + { + "epoch": 0.6086077411900636, + "grad_norm": 2.084767297802879, + "learning_rate": 1.0522512808755864e-06, + "loss": 0.5181, + "step": 14749 + }, + { + "epoch": 0.6086490055294215, + "grad_norm": 12.43109186514483, + "learning_rate": 1.052059948590679e-06, + "loss": 0.6082, + "step": 14750 + }, + { + "epoch": 0.6086902698687794, + "grad_norm": 3.249736232132932, + "learning_rate": 1.0518686243071023e-06, + "loss": 0.507, + "step": 14751 + }, + { + "epoch": 0.6087315342081373, + "grad_norm": 4.953205745114685, + "learning_rate": 1.0516773080282724e-06, + "loss": 0.5423, + "step": 14752 + }, + { + "epoch": 0.6087727985474952, + "grad_norm": 3.58905720093281, + "learning_rate": 1.0514859997576078e-06, + "loss": 0.4935, + "step": 14753 + }, + { + "epoch": 0.6088140628868531, + "grad_norm": 4.502519072209061, + "learning_rate": 1.0512946994985248e-06, + "loss": 0.5668, + "step": 14754 + }, + { + "epoch": 0.6088553272262112, + "grad_norm": 5.14581667308786, + "learning_rate": 1.0511034072544412e-06, + "loss": 0.5557, + "step": 14755 + }, + { + "epoch": 0.6088965915655691, + "grad_norm": 4.438695028661993, + "learning_rate": 1.0509121230287737e-06, + "loss": 0.5517, + "step": 14756 + }, + { + "epoch": 0.608937855904927, + "grad_norm": 2.5626061794716524, + "learning_rate": 1.0507208468249391e-06, + "loss": 0.5115, + "step": 14757 + }, + { + "epoch": 0.6089791202442849, + "grad_norm": 2.7279562808743565, + "learning_rate": 1.050529578646354e-06, + "loss": 0.5602, + "step": 14758 + }, + { + "epoch": 0.6090203845836428, + "grad_norm": 2.7599339170196084, + "learning_rate": 1.0503383184964352e-06, + "loss": 0.5132, + "step": 14759 + }, + { + "epoch": 0.6090616489230007, + "grad_norm": 2.6508474718521846, + "learning_rate": 1.050147066378599e-06, + "loss": 0.4864, + "step": 14760 + }, + { + "epoch": 0.6091029132623587, + "grad_norm": 2.1873627842403116, + "learning_rate": 1.0499558222962614e-06, + "loss": 0.533, + "step": 14761 + }, + { + "epoch": 0.6091441776017166, + "grad_norm": 1.8485123559562147, + "learning_rate": 1.049764586252839e-06, + "loss": 0.4975, + "step": 14762 + }, + { + "epoch": 0.6091854419410745, + "grad_norm": 22.195178786289116, + "learning_rate": 1.049573358251747e-06, + "loss": 0.5036, + "step": 14763 + }, + { + "epoch": 0.6092267062804324, + "grad_norm": 2.6955488270755836, + "learning_rate": 1.0493821382964019e-06, + "loss": 0.549, + "step": 14764 + }, + { + "epoch": 0.6092679706197903, + "grad_norm": 2.980828035589508, + "learning_rate": 1.0491909263902185e-06, + "loss": 0.5253, + "step": 14765 + }, + { + "epoch": 0.6093092349591483, + "grad_norm": 4.28893149855634, + "learning_rate": 1.0489997225366135e-06, + "loss": 0.5478, + "step": 14766 + }, + { + "epoch": 0.6093504992985063, + "grad_norm": 11.706478611019474, + "learning_rate": 1.0488085267390013e-06, + "loss": 0.5181, + "step": 14767 + }, + { + "epoch": 0.6093917636378642, + "grad_norm": 
3.776254993425667, + "learning_rate": 1.048617339000798e-06, + "loss": 0.6082, + "step": 14768 + }, + { + "epoch": 0.6094330279772221, + "grad_norm": 3.1869205794683553, + "learning_rate": 1.0484261593254177e-06, + "loss": 0.4996, + "step": 14769 + }, + { + "epoch": 0.60947429231658, + "grad_norm": 6.016096836344249, + "learning_rate": 1.048234987716276e-06, + "loss": 0.5482, + "step": 14770 + }, + { + "epoch": 0.6095155566559379, + "grad_norm": 1.898226629955531, + "learning_rate": 1.0480438241767877e-06, + "loss": 0.5098, + "step": 14771 + }, + { + "epoch": 0.6095568209952958, + "grad_norm": 2.8467166940101576, + "learning_rate": 1.0478526687103671e-06, + "loss": 0.5334, + "step": 14772 + }, + { + "epoch": 0.6095980853346538, + "grad_norm": 2.074480680836711, + "learning_rate": 1.0476615213204292e-06, + "loss": 0.5204, + "step": 14773 + }, + { + "epoch": 0.6096393496740117, + "grad_norm": 2.83004979786976, + "learning_rate": 1.047470382010388e-06, + "loss": 0.5829, + "step": 14774 + }, + { + "epoch": 0.6096806140133696, + "grad_norm": 3.3370407016196286, + "learning_rate": 1.0472792507836578e-06, + "loss": 0.5331, + "step": 14775 + }, + { + "epoch": 0.6097218783527276, + "grad_norm": 3.51099873855346, + "learning_rate": 1.0470881276436533e-06, + "loss": 0.5526, + "step": 14776 + }, + { + "epoch": 0.6097631426920855, + "grad_norm": 2.648488405098594, + "learning_rate": 1.0468970125937873e-06, + "loss": 0.5077, + "step": 14777 + }, + { + "epoch": 0.6098044070314435, + "grad_norm": 17.927246757184744, + "learning_rate": 1.046705905637474e-06, + "loss": 0.5417, + "step": 14778 + }, + { + "epoch": 0.6098456713708014, + "grad_norm": 2.636097410218457, + "learning_rate": 1.0465148067781272e-06, + "loss": 0.4538, + "step": 14779 + }, + { + "epoch": 0.6098869357101593, + "grad_norm": 2.6762639660522676, + "learning_rate": 1.0463237160191606e-06, + "loss": 0.5423, + "step": 14780 + }, + { + "epoch": 0.6099282000495172, + "grad_norm": 3.4395081412058413, + "learning_rate": 1.0461326333639871e-06, + "loss": 0.5993, + "step": 14781 + }, + { + "epoch": 0.6099694643888751, + "grad_norm": 2.3969159326592724, + "learning_rate": 1.0459415588160205e-06, + "loss": 0.4896, + "step": 14782 + }, + { + "epoch": 0.610010728728233, + "grad_norm": 3.5916113484157832, + "learning_rate": 1.0457504923786733e-06, + "loss": 0.5074, + "step": 14783 + }, + { + "epoch": 0.610051993067591, + "grad_norm": 3.2134083440068486, + "learning_rate": 1.0455594340553591e-06, + "loss": 0.5616, + "step": 14784 + }, + { + "epoch": 0.6100932574069489, + "grad_norm": 4.456123693090602, + "learning_rate": 1.0453683838494895e-06, + "loss": 0.5829, + "step": 14785 + }, + { + "epoch": 0.6101345217463069, + "grad_norm": 2.5814755099633797, + "learning_rate": 1.0451773417644784e-06, + "loss": 0.5021, + "step": 14786 + }, + { + "epoch": 0.6101757860856648, + "grad_norm": 2.389812509481508, + "learning_rate": 1.0449863078037376e-06, + "loss": 0.5369, + "step": 14787 + }, + { + "epoch": 0.6102170504250227, + "grad_norm": 3.3675185328188584, + "learning_rate": 1.0447952819706797e-06, + "loss": 0.524, + "step": 14788 + }, + { + "epoch": 0.6102583147643806, + "grad_norm": 3.0880213776621086, + "learning_rate": 1.0446042642687173e-06, + "loss": 0.581, + "step": 14789 + }, + { + "epoch": 0.6102995791037386, + "grad_norm": 2.502213413494387, + "learning_rate": 1.0444132547012615e-06, + "loss": 0.5177, + "step": 14790 + }, + { + "epoch": 0.6103408434430965, + "grad_norm": 2.6975309451731158, + "learning_rate": 1.0442222532717244e-06, + "loss": 0.4994, 
+ "step": 14791 + }, + { + "epoch": 0.6103821077824544, + "grad_norm": 2.087599904579364, + "learning_rate": 1.0440312599835184e-06, + "loss": 0.5199, + "step": 14792 + }, + { + "epoch": 0.6104233721218123, + "grad_norm": 3.5624275431176753, + "learning_rate": 1.0438402748400545e-06, + "loss": 0.5213, + "step": 14793 + }, + { + "epoch": 0.6104646364611702, + "grad_norm": 16.20951749391419, + "learning_rate": 1.0436492978447447e-06, + "loss": 0.5245, + "step": 14794 + }, + { + "epoch": 0.6105059008005281, + "grad_norm": 5.119112157313262, + "learning_rate": 1.0434583290009995e-06, + "loss": 0.4897, + "step": 14795 + }, + { + "epoch": 0.6105471651398862, + "grad_norm": 2.120286535096167, + "learning_rate": 1.0432673683122314e-06, + "loss": 0.5681, + "step": 14796 + }, + { + "epoch": 0.6105884294792441, + "grad_norm": 4.476440460069592, + "learning_rate": 1.04307641578185e-06, + "loss": 0.4427, + "step": 14797 + }, + { + "epoch": 0.610629693818602, + "grad_norm": 2.3408393268250673, + "learning_rate": 1.0428854714132673e-06, + "loss": 0.5483, + "step": 14798 + }, + { + "epoch": 0.6106709581579599, + "grad_norm": 2.875175919328026, + "learning_rate": 1.0426945352098932e-06, + "loss": 0.5063, + "step": 14799 + }, + { + "epoch": 0.6107122224973178, + "grad_norm": 2.0409007039450295, + "learning_rate": 1.0425036071751393e-06, + "loss": 0.5105, + "step": 14800 + }, + { + "epoch": 0.6107534868366757, + "grad_norm": 2.918458432518369, + "learning_rate": 1.042312687312415e-06, + "loss": 0.506, + "step": 14801 + }, + { + "epoch": 0.6107947511760337, + "grad_norm": 3.157438987758854, + "learning_rate": 1.0421217756251315e-06, + "loss": 0.4948, + "step": 14802 + }, + { + "epoch": 0.6108360155153916, + "grad_norm": 3.054777715934904, + "learning_rate": 1.0419308721166984e-06, + "loss": 0.5341, + "step": 14803 + }, + { + "epoch": 0.6108772798547495, + "grad_norm": 3.1595520764678815, + "learning_rate": 1.0417399767905256e-06, + "loss": 0.5204, + "step": 14804 + }, + { + "epoch": 0.6109185441941074, + "grad_norm": 2.372038168040659, + "learning_rate": 1.0415490896500232e-06, + "loss": 0.5447, + "step": 14805 + }, + { + "epoch": 0.6109598085334654, + "grad_norm": 5.93020102252876, + "learning_rate": 1.041358210698601e-06, + "loss": 0.5169, + "step": 14806 + }, + { + "epoch": 0.6110010728728233, + "grad_norm": 2.4165802455943823, + "learning_rate": 1.0411673399396684e-06, + "loss": 0.4933, + "step": 14807 + }, + { + "epoch": 0.6110423372121813, + "grad_norm": 2.8805486262795514, + "learning_rate": 1.0409764773766349e-06, + "loss": 0.5462, + "step": 14808 + }, + { + "epoch": 0.6110836015515392, + "grad_norm": 2.879319360973821, + "learning_rate": 1.0407856230129102e-06, + "loss": 0.5069, + "step": 14809 + }, + { + "epoch": 0.6111248658908971, + "grad_norm": 2.553537590793911, + "learning_rate": 1.040594776851903e-06, + "loss": 0.5178, + "step": 14810 + }, + { + "epoch": 0.611166130230255, + "grad_norm": 6.221826508368972, + "learning_rate": 1.0404039388970218e-06, + "loss": 0.4928, + "step": 14811 + }, + { + "epoch": 0.6112073945696129, + "grad_norm": 2.3716811311106754, + "learning_rate": 1.0402131091516763e-06, + "loss": 0.5618, + "step": 14812 + }, + { + "epoch": 0.6112486589089708, + "grad_norm": 3.8098793735375294, + "learning_rate": 1.040022287619275e-06, + "loss": 0.5164, + "step": 14813 + }, + { + "epoch": 0.6112899232483288, + "grad_norm": 3.1443956479046937, + "learning_rate": 1.0398314743032262e-06, + "loss": 0.552, + "step": 14814 + }, + { + "epoch": 0.6113311875876867, + "grad_norm": 
3.7731769932066475, + "learning_rate": 1.0396406692069388e-06, + "loss": 0.5453, + "step": 14815 + }, + { + "epoch": 0.6113724519270447, + "grad_norm": 2.5555781676978113, + "learning_rate": 1.0394498723338202e-06, + "loss": 0.5014, + "step": 14816 + }, + { + "epoch": 0.6114137162664026, + "grad_norm": 2.211703536773194, + "learning_rate": 1.0392590836872794e-06, + "loss": 0.4782, + "step": 14817 + }, + { + "epoch": 0.6114549806057605, + "grad_norm": 2.6111231040774814, + "learning_rate": 1.0390683032707236e-06, + "loss": 0.522, + "step": 14818 + }, + { + "epoch": 0.6114962449451185, + "grad_norm": 3.4429024424298342, + "learning_rate": 1.0388775310875612e-06, + "loss": 0.5099, + "step": 14819 + }, + { + "epoch": 0.6115375092844764, + "grad_norm": 1.7858694457891469, + "learning_rate": 1.0386867671411992e-06, + "loss": 0.545, + "step": 14820 + }, + { + "epoch": 0.6115787736238343, + "grad_norm": 2.2553658669958674, + "learning_rate": 1.038496011435046e-06, + "loss": 0.5475, + "step": 14821 + }, + { + "epoch": 0.6116200379631922, + "grad_norm": 2.9395417602589893, + "learning_rate": 1.0383052639725084e-06, + "loss": 0.5423, + "step": 14822 + }, + { + "epoch": 0.6116613023025501, + "grad_norm": 2.9124514999801434, + "learning_rate": 1.038114524756994e-06, + "loss": 0.541, + "step": 14823 + }, + { + "epoch": 0.611702566641908, + "grad_norm": 2.3738399489223774, + "learning_rate": 1.0379237937919092e-06, + "loss": 0.5218, + "step": 14824 + }, + { + "epoch": 0.611743830981266, + "grad_norm": 3.3345911524281804, + "learning_rate": 1.0377330710806617e-06, + "loss": 0.5108, + "step": 14825 + }, + { + "epoch": 0.6117850953206239, + "grad_norm": 2.8304065253311412, + "learning_rate": 1.0375423566266578e-06, + "loss": 0.5117, + "step": 14826 + }, + { + "epoch": 0.6118263596599819, + "grad_norm": 6.293583224251729, + "learning_rate": 1.0373516504333045e-06, + "loss": 0.4931, + "step": 14827 + }, + { + "epoch": 0.6118676239993398, + "grad_norm": 2.365459823977945, + "learning_rate": 1.0371609525040083e-06, + "loss": 0.5703, + "step": 14828 + }, + { + "epoch": 0.6119088883386977, + "grad_norm": 2.111388583444493, + "learning_rate": 1.0369702628421753e-06, + "loss": 0.5153, + "step": 14829 + }, + { + "epoch": 0.6119501526780556, + "grad_norm": 5.570618008717394, + "learning_rate": 1.0367795814512114e-06, + "loss": 0.505, + "step": 14830 + }, + { + "epoch": 0.6119914170174136, + "grad_norm": 2.2586579657222083, + "learning_rate": 1.0365889083345231e-06, + "loss": 0.4727, + "step": 14831 + }, + { + "epoch": 0.6120326813567715, + "grad_norm": 2.3751569805635016, + "learning_rate": 1.036398243495516e-06, + "loss": 0.5118, + "step": 14832 + }, + { + "epoch": 0.6120739456961294, + "grad_norm": 2.6081437405698082, + "learning_rate": 1.0362075869375964e-06, + "loss": 0.4738, + "step": 14833 + }, + { + "epoch": 0.6121152100354873, + "grad_norm": 2.1166343653273754, + "learning_rate": 1.036016938664169e-06, + "loss": 0.5774, + "step": 14834 + }, + { + "epoch": 0.6121564743748452, + "grad_norm": 2.010086969112182, + "learning_rate": 1.0358262986786403e-06, + "loss": 0.564, + "step": 14835 + }, + { + "epoch": 0.6121977387142031, + "grad_norm": 2.7682788806308265, + "learning_rate": 1.0356356669844146e-06, + "loss": 0.517, + "step": 14836 + }, + { + "epoch": 0.6122390030535612, + "grad_norm": 2.8717384769774426, + "learning_rate": 1.035445043584898e-06, + "loss": 0.5185, + "step": 14837 + }, + { + "epoch": 0.6122802673929191, + "grad_norm": 8.645923616650126, + "learning_rate": 1.0352544284834946e-06, + "loss": 
0.5341, + "step": 14838 + }, + { + "epoch": 0.612321531732277, + "grad_norm": 4.134593894655728, + "learning_rate": 1.03506382168361e-06, + "loss": 0.4605, + "step": 14839 + }, + { + "epoch": 0.6123627960716349, + "grad_norm": 2.2350678483180255, + "learning_rate": 1.0348732231886484e-06, + "loss": 0.4781, + "step": 14840 + }, + { + "epoch": 0.6124040604109928, + "grad_norm": 4.8469589539525275, + "learning_rate": 1.0346826330020153e-06, + "loss": 0.5428, + "step": 14841 + }, + { + "epoch": 0.6124453247503507, + "grad_norm": 2.1810130673830006, + "learning_rate": 1.034492051127114e-06, + "loss": 0.4796, + "step": 14842 + }, + { + "epoch": 0.6124865890897087, + "grad_norm": 2.583458742165137, + "learning_rate": 1.034301477567349e-06, + "loss": 0.544, + "step": 14843 + }, + { + "epoch": 0.6125278534290666, + "grad_norm": 2.1785123987252866, + "learning_rate": 1.034110912326125e-06, + "loss": 0.4933, + "step": 14844 + }, + { + "epoch": 0.6125691177684245, + "grad_norm": 3.497966596870382, + "learning_rate": 1.0339203554068452e-06, + "loss": 0.5176, + "step": 14845 + }, + { + "epoch": 0.6126103821077824, + "grad_norm": 3.8102380659679858, + "learning_rate": 1.033729806812914e-06, + "loss": 0.5015, + "step": 14846 + }, + { + "epoch": 0.6126516464471404, + "grad_norm": 7.907573510354625, + "learning_rate": 1.0335392665477346e-06, + "loss": 0.4972, + "step": 14847 + }, + { + "epoch": 0.6126929107864983, + "grad_norm": 7.25454322426351, + "learning_rate": 1.0333487346147112e-06, + "loss": 0.4889, + "step": 14848 + }, + { + "epoch": 0.6127341751258563, + "grad_norm": 1.8426590890671954, + "learning_rate": 1.0331582110172467e-06, + "loss": 0.5326, + "step": 14849 + }, + { + "epoch": 0.6127754394652142, + "grad_norm": 3.716988432324978, + "learning_rate": 1.032967695758744e-06, + "loss": 0.5335, + "step": 14850 + }, + { + "epoch": 0.6128167038045721, + "grad_norm": 5.102329746991089, + "learning_rate": 1.0327771888426071e-06, + "loss": 0.5998, + "step": 14851 + }, + { + "epoch": 0.61285796814393, + "grad_norm": 2.3952741500814496, + "learning_rate": 1.032586690272238e-06, + "loss": 0.5031, + "step": 14852 + }, + { + "epoch": 0.6128992324832879, + "grad_norm": 3.048249711103171, + "learning_rate": 1.0323962000510403e-06, + "loss": 0.4479, + "step": 14853 + }, + { + "epoch": 0.6129404968226458, + "grad_norm": 4.5644410028865865, + "learning_rate": 1.0322057181824164e-06, + "loss": 0.514, + "step": 14854 + }, + { + "epoch": 0.6129817611620038, + "grad_norm": 1.8537070562504594, + "learning_rate": 1.032015244669768e-06, + "loss": 0.4895, + "step": 14855 + }, + { + "epoch": 0.6130230255013617, + "grad_norm": 6.857980398827388, + "learning_rate": 1.0318247795164985e-06, + "loss": 0.511, + "step": 14856 + }, + { + "epoch": 0.6130642898407197, + "grad_norm": 2.3498085233523596, + "learning_rate": 1.0316343227260093e-06, + "loss": 0.5302, + "step": 14857 + }, + { + "epoch": 0.6131055541800776, + "grad_norm": 1.9468852718994003, + "learning_rate": 1.031443874301703e-06, + "loss": 0.5098, + "step": 14858 + }, + { + "epoch": 0.6131468185194355, + "grad_norm": 6.542356367170385, + "learning_rate": 1.031253434246981e-06, + "loss": 0.5521, + "step": 14859 + }, + { + "epoch": 0.6131880828587934, + "grad_norm": 3.223299594819006, + "learning_rate": 1.0310630025652456e-06, + "loss": 0.4833, + "step": 14860 + }, + { + "epoch": 0.6132293471981514, + "grad_norm": 2.4913486936758753, + "learning_rate": 1.0308725792598978e-06, + "loss": 0.5302, + "step": 14861 + }, + { + "epoch": 0.6132706115375093, + "grad_norm": 
3.9754757579702695, + "learning_rate": 1.0306821643343394e-06, + "loss": 0.4567, + "step": 14862 + }, + { + "epoch": 0.6133118758768672, + "grad_norm": 4.1104320413977575, + "learning_rate": 1.0304917577919715e-06, + "loss": 0.4864, + "step": 14863 + }, + { + "epoch": 0.6133531402162251, + "grad_norm": 2.900404972902716, + "learning_rate": 1.0303013596361957e-06, + "loss": 0.4914, + "step": 14864 + }, + { + "epoch": 0.613394404555583, + "grad_norm": 3.6095310760398784, + "learning_rate": 1.0301109698704123e-06, + "loss": 0.495, + "step": 14865 + }, + { + "epoch": 0.6134356688949409, + "grad_norm": 2.086084432408596, + "learning_rate": 1.0299205884980226e-06, + "loss": 0.4968, + "step": 14866 + }, + { + "epoch": 0.613476933234299, + "grad_norm": 2.898573927098439, + "learning_rate": 1.029730215522427e-06, + "loss": 0.517, + "step": 14867 + }, + { + "epoch": 0.6135181975736569, + "grad_norm": 4.223883084544181, + "learning_rate": 1.0295398509470269e-06, + "loss": 0.5633, + "step": 14868 + }, + { + "epoch": 0.6135594619130148, + "grad_norm": 3.743428233548753, + "learning_rate": 1.0293494947752213e-06, + "loss": 0.4558, + "step": 14869 + }, + { + "epoch": 0.6136007262523727, + "grad_norm": 2.618712375363381, + "learning_rate": 1.0291591470104113e-06, + "loss": 0.5308, + "step": 14870 + }, + { + "epoch": 0.6136419905917306, + "grad_norm": 2.30635131643233, + "learning_rate": 1.0289688076559964e-06, + "loss": 0.5644, + "step": 14871 + }, + { + "epoch": 0.6136832549310886, + "grad_norm": 2.159729192084894, + "learning_rate": 1.0287784767153774e-06, + "loss": 0.5183, + "step": 14872 + }, + { + "epoch": 0.6137245192704465, + "grad_norm": 2.9063661542421113, + "learning_rate": 1.0285881541919531e-06, + "loss": 0.4919, + "step": 14873 + }, + { + "epoch": 0.6137657836098044, + "grad_norm": 3.449374256006231, + "learning_rate": 1.0283978400891241e-06, + "loss": 0.4412, + "step": 14874 + }, + { + "epoch": 0.6138070479491623, + "grad_norm": 3.8327219081381063, + "learning_rate": 1.0282075344102892e-06, + "loss": 0.5269, + "step": 14875 + }, + { + "epoch": 0.6138483122885202, + "grad_norm": 2.568892795904922, + "learning_rate": 1.0280172371588482e-06, + "loss": 0.554, + "step": 14876 + }, + { + "epoch": 0.6138895766278782, + "grad_norm": 8.404536684066864, + "learning_rate": 1.0278269483381998e-06, + "loss": 0.6192, + "step": 14877 + }, + { + "epoch": 0.6139308409672362, + "grad_norm": 2.7131015318233627, + "learning_rate": 1.0276366679517436e-06, + "loss": 0.516, + "step": 14878 + }, + { + "epoch": 0.6139721053065941, + "grad_norm": 5.268234594807625, + "learning_rate": 1.027446396002878e-06, + "loss": 0.4914, + "step": 14879 + }, + { + "epoch": 0.614013369645952, + "grad_norm": 17.55538106657317, + "learning_rate": 1.027256132495002e-06, + "loss": 0.5411, + "step": 14880 + }, + { + "epoch": 0.6140546339853099, + "grad_norm": 3.0822331657654916, + "learning_rate": 1.0270658774315144e-06, + "loss": 0.5018, + "step": 14881 + }, + { + "epoch": 0.6140958983246678, + "grad_norm": 7.580920429174559, + "learning_rate": 1.026875630815813e-06, + "loss": 0.5565, + "step": 14882 + }, + { + "epoch": 0.6141371626640257, + "grad_norm": 2.1374472795263317, + "learning_rate": 1.0266853926512966e-06, + "loss": 0.4938, + "step": 14883 + }, + { + "epoch": 0.6141784270033837, + "grad_norm": 2.7576993648797448, + "learning_rate": 1.0264951629413626e-06, + "loss": 0.4923, + "step": 14884 + }, + { + "epoch": 0.6142196913427416, + "grad_norm": 2.267998824429099, + "learning_rate": 1.0263049416894101e-06, + "loss": 0.5152, + 
"step": 14885 + }, + { + "epoch": 0.6142609556820995, + "grad_norm": 4.477942891738161, + "learning_rate": 1.0261147288988359e-06, + "loss": 0.4917, + "step": 14886 + }, + { + "epoch": 0.6143022200214574, + "grad_norm": 2.3046234129025436, + "learning_rate": 1.0259245245730383e-06, + "loss": 0.5144, + "step": 14887 + }, + { + "epoch": 0.6143434843608154, + "grad_norm": 4.163390915031739, + "learning_rate": 1.025734328715415e-06, + "loss": 0.5179, + "step": 14888 + }, + { + "epoch": 0.6143847487001733, + "grad_norm": 2.3126453070330935, + "learning_rate": 1.0255441413293623e-06, + "loss": 0.5306, + "step": 14889 + }, + { + "epoch": 0.6144260130395313, + "grad_norm": 3.727921027455353, + "learning_rate": 1.0253539624182789e-06, + "loss": 0.4787, + "step": 14890 + }, + { + "epoch": 0.6144672773788892, + "grad_norm": 2.6576308138571516, + "learning_rate": 1.0251637919855605e-06, + "loss": 0.577, + "step": 14891 + }, + { + "epoch": 0.6145085417182471, + "grad_norm": 3.833761834016179, + "learning_rate": 1.024973630034605e-06, + "loss": 0.5445, + "step": 14892 + }, + { + "epoch": 0.614549806057605, + "grad_norm": 8.176202134359393, + "learning_rate": 1.0247834765688084e-06, + "loss": 0.5174, + "step": 14893 + }, + { + "epoch": 0.6145910703969629, + "grad_norm": 3.28401716025096, + "learning_rate": 1.0245933315915686e-06, + "loss": 0.52, + "step": 14894 + }, + { + "epoch": 0.6146323347363208, + "grad_norm": 2.731196602060069, + "learning_rate": 1.0244031951062808e-06, + "loss": 0.5468, + "step": 14895 + }, + { + "epoch": 0.6146735990756788, + "grad_norm": 9.346700696991029, + "learning_rate": 1.0242130671163412e-06, + "loss": 0.4934, + "step": 14896 + }, + { + "epoch": 0.6147148634150367, + "grad_norm": 4.586429264409693, + "learning_rate": 1.024022947625147e-06, + "loss": 0.5201, + "step": 14897 + }, + { + "epoch": 0.6147561277543947, + "grad_norm": 7.398184998991901, + "learning_rate": 1.023832836636093e-06, + "loss": 0.513, + "step": 14898 + }, + { + "epoch": 0.6147973920937526, + "grad_norm": 3.331855959373017, + "learning_rate": 1.0236427341525763e-06, + "loss": 0.5169, + "step": 14899 + }, + { + "epoch": 0.6148386564331105, + "grad_norm": 12.851369158417283, + "learning_rate": 1.0234526401779919e-06, + "loss": 0.5302, + "step": 14900 + }, + { + "epoch": 0.6148799207724684, + "grad_norm": 2.7181213451238633, + "learning_rate": 1.0232625547157353e-06, + "loss": 0.5395, + "step": 14901 + }, + { + "epoch": 0.6149211851118264, + "grad_norm": 2.1513983641233567, + "learning_rate": 1.0230724777692023e-06, + "loss": 0.5657, + "step": 14902 + }, + { + "epoch": 0.6149624494511843, + "grad_norm": 4.224981557551481, + "learning_rate": 1.022882409341788e-06, + "loss": 0.5353, + "step": 14903 + }, + { + "epoch": 0.6150037137905422, + "grad_norm": 2.890982554761667, + "learning_rate": 1.022692349436887e-06, + "loss": 0.5023, + "step": 14904 + }, + { + "epoch": 0.6150449781299001, + "grad_norm": 2.3426494819096257, + "learning_rate": 1.0225022980578952e-06, + "loss": 0.5084, + "step": 14905 + }, + { + "epoch": 0.615086242469258, + "grad_norm": 2.180799428151169, + "learning_rate": 1.0223122552082062e-06, + "loss": 0.5207, + "step": 14906 + }, + { + "epoch": 0.6151275068086159, + "grad_norm": 1.860237563686597, + "learning_rate": 1.0221222208912168e-06, + "loss": 0.5267, + "step": 14907 + }, + { + "epoch": 0.615168771147974, + "grad_norm": 6.259184278631877, + "learning_rate": 1.0219321951103188e-06, + "loss": 0.5327, + "step": 14908 + }, + { + "epoch": 0.6152100354873319, + "grad_norm": 3.428264208554663, 
+ "learning_rate": 1.0217421778689079e-06, + "loss": 0.5603, + "step": 14909 + }, + { + "epoch": 0.6152512998266898, + "grad_norm": 3.027380694359031, + "learning_rate": 1.021552169170378e-06, + "loss": 0.524, + "step": 14910 + }, + { + "epoch": 0.6152925641660477, + "grad_norm": 2.768837829725378, + "learning_rate": 1.0213621690181237e-06, + "loss": 0.5513, + "step": 14911 + }, + { + "epoch": 0.6153338285054056, + "grad_norm": 7.178591712751267, + "learning_rate": 1.0211721774155379e-06, + "loss": 0.5431, + "step": 14912 + }, + { + "epoch": 0.6153750928447635, + "grad_norm": 5.434639944475165, + "learning_rate": 1.020982194366015e-06, + "loss": 0.537, + "step": 14913 + }, + { + "epoch": 0.6154163571841215, + "grad_norm": 2.3040308243410754, + "learning_rate": 1.0207922198729484e-06, + "loss": 0.5459, + "step": 14914 + }, + { + "epoch": 0.6154576215234794, + "grad_norm": 2.8388906330972676, + "learning_rate": 1.0206022539397316e-06, + "loss": 0.505, + "step": 14915 + }, + { + "epoch": 0.6154988858628373, + "grad_norm": 3.76959887628997, + "learning_rate": 1.020412296569758e-06, + "loss": 0.518, + "step": 14916 + }, + { + "epoch": 0.6155401502021952, + "grad_norm": 2.2121838184135236, + "learning_rate": 1.0202223477664202e-06, + "loss": 0.4821, + "step": 14917 + }, + { + "epoch": 0.6155814145415532, + "grad_norm": 3.1304557733362914, + "learning_rate": 1.0200324075331116e-06, + "loss": 0.5171, + "step": 14918 + }, + { + "epoch": 0.6156226788809112, + "grad_norm": 1.828757419922618, + "learning_rate": 1.019842475873225e-06, + "loss": 0.5082, + "step": 14919 + }, + { + "epoch": 0.6156639432202691, + "grad_norm": 21.177050351553685, + "learning_rate": 1.0196525527901536e-06, + "loss": 0.5306, + "step": 14920 + }, + { + "epoch": 0.615705207559627, + "grad_norm": 3.78446661632811, + "learning_rate": 1.0194626382872883e-06, + "loss": 0.5523, + "step": 14921 + }, + { + "epoch": 0.6157464718989849, + "grad_norm": 4.885753092318465, + "learning_rate": 1.019272732368023e-06, + "loss": 0.5123, + "step": 14922 + }, + { + "epoch": 0.6157877362383428, + "grad_norm": 3.9977634600563547, + "learning_rate": 1.0190828350357488e-06, + "loss": 0.5428, + "step": 14923 + }, + { + "epoch": 0.6158290005777007, + "grad_norm": 1.9615591239421004, + "learning_rate": 1.0188929462938583e-06, + "loss": 0.5245, + "step": 14924 + }, + { + "epoch": 0.6158702649170587, + "grad_norm": 3.494431796892907, + "learning_rate": 1.0187030661457434e-06, + "loss": 0.5308, + "step": 14925 + }, + { + "epoch": 0.6159115292564166, + "grad_norm": 4.368499738157095, + "learning_rate": 1.018513194594796e-06, + "loss": 0.534, + "step": 14926 + }, + { + "epoch": 0.6159527935957745, + "grad_norm": 6.6505604696168445, + "learning_rate": 1.0183233316444073e-06, + "loss": 0.5295, + "step": 14927 + }, + { + "epoch": 0.6159940579351325, + "grad_norm": 6.925621139481874, + "learning_rate": 1.0181334772979687e-06, + "loss": 0.5031, + "step": 14928 + }, + { + "epoch": 0.6160353222744904, + "grad_norm": 2.4242601461778186, + "learning_rate": 1.0179436315588717e-06, + "loss": 0.5102, + "step": 14929 + }, + { + "epoch": 0.6160765866138483, + "grad_norm": 2.183616279861457, + "learning_rate": 1.0177537944305074e-06, + "loss": 0.5423, + "step": 14930 + }, + { + "epoch": 0.6161178509532063, + "grad_norm": 7.166032888381699, + "learning_rate": 1.0175639659162667e-06, + "loss": 0.5204, + "step": 14931 + }, + { + "epoch": 0.6161591152925642, + "grad_norm": 2.166806573008508, + "learning_rate": 1.0173741460195404e-06, + "loss": 0.4848, + "step": 14932 + }, + { 
+ "epoch": 0.6162003796319221, + "grad_norm": 2.026144410452077, + "learning_rate": 1.0171843347437198e-06, + "loss": 0.5511, + "step": 14933 + }, + { + "epoch": 0.61624164397128, + "grad_norm": 2.7527631831182027, + "learning_rate": 1.0169945320921943e-06, + "loss": 0.4806, + "step": 14934 + }, + { + "epoch": 0.6162829083106379, + "grad_norm": 3.252957615172104, + "learning_rate": 1.0168047380683545e-06, + "loss": 0.5303, + "step": 14935 + }, + { + "epoch": 0.6163241726499958, + "grad_norm": 3.6679977959288186, + "learning_rate": 1.0166149526755911e-06, + "loss": 0.4781, + "step": 14936 + }, + { + "epoch": 0.6163654369893538, + "grad_norm": 2.175387787880955, + "learning_rate": 1.0164251759172938e-06, + "loss": 0.5619, + "step": 14937 + }, + { + "epoch": 0.6164067013287118, + "grad_norm": 2.126662491892934, + "learning_rate": 1.0162354077968527e-06, + "loss": 0.5315, + "step": 14938 + }, + { + "epoch": 0.6164479656680697, + "grad_norm": 1.8695773558352322, + "learning_rate": 1.0160456483176571e-06, + "loss": 0.5254, + "step": 14939 + }, + { + "epoch": 0.6164892300074276, + "grad_norm": 3.5634386860519833, + "learning_rate": 1.0158558974830973e-06, + "loss": 0.523, + "step": 14940 + }, + { + "epoch": 0.6165304943467855, + "grad_norm": 2.614163468180377, + "learning_rate": 1.015666155296562e-06, + "loss": 0.5596, + "step": 14941 + }, + { + "epoch": 0.6165717586861434, + "grad_norm": 3.0261494359028602, + "learning_rate": 1.015476421761441e-06, + "loss": 0.5478, + "step": 14942 + }, + { + "epoch": 0.6166130230255014, + "grad_norm": 4.439362868613149, + "learning_rate": 1.015286696881123e-06, + "loss": 0.5399, + "step": 14943 + }, + { + "epoch": 0.6166542873648593, + "grad_norm": 2.4867124714560442, + "learning_rate": 1.0150969806589974e-06, + "loss": 0.4973, + "step": 14944 + }, + { + "epoch": 0.6166955517042172, + "grad_norm": 3.01710615206231, + "learning_rate": 1.0149072730984526e-06, + "loss": 0.541, + "step": 14945 + }, + { + "epoch": 0.6167368160435751, + "grad_norm": 2.3120035882696337, + "learning_rate": 1.0147175742028782e-06, + "loss": 0.5113, + "step": 14946 + }, + { + "epoch": 0.616778080382933, + "grad_norm": 2.5905475395194237, + "learning_rate": 1.014527883975661e-06, + "loss": 0.5225, + "step": 14947 + }, + { + "epoch": 0.6168193447222909, + "grad_norm": 1.9504107336013936, + "learning_rate": 1.0143382024201904e-06, + "loss": 0.5353, + "step": 14948 + }, + { + "epoch": 0.616860609061649, + "grad_norm": 6.191817612948791, + "learning_rate": 1.0141485295398545e-06, + "loss": 0.5105, + "step": 14949 + }, + { + "epoch": 0.6169018734010069, + "grad_norm": 2.5214954816730146, + "learning_rate": 1.0139588653380412e-06, + "loss": 0.5238, + "step": 14950 + }, + { + "epoch": 0.6169431377403648, + "grad_norm": 2.502217058888599, + "learning_rate": 1.0137692098181385e-06, + "loss": 0.5238, + "step": 14951 + }, + { + "epoch": 0.6169844020797227, + "grad_norm": 4.5436984190404175, + "learning_rate": 1.0135795629835341e-06, + "loss": 0.509, + "step": 14952 + }, + { + "epoch": 0.6170256664190806, + "grad_norm": 3.6357746893329383, + "learning_rate": 1.0133899248376154e-06, + "loss": 0.5057, + "step": 14953 + }, + { + "epoch": 0.6170669307584385, + "grad_norm": 2.6040217250870574, + "learning_rate": 1.0132002953837703e-06, + "loss": 0.4956, + "step": 14954 + }, + { + "epoch": 0.6171081950977965, + "grad_norm": 22.846766991906033, + "learning_rate": 1.0130106746253852e-06, + "loss": 0.5658, + "step": 14955 + }, + { + "epoch": 0.6171494594371544, + "grad_norm": 2.119885249962252, + 
"learning_rate": 1.012821062565848e-06, + "loss": 0.4862, + "step": 14956 + }, + { + "epoch": 0.6171907237765123, + "grad_norm": 3.943134226294418, + "learning_rate": 1.0126314592085456e-06, + "loss": 0.5289, + "step": 14957 + }, + { + "epoch": 0.6172319881158702, + "grad_norm": 3.546718556222608, + "learning_rate": 1.0124418645568643e-06, + "loss": 0.5404, + "step": 14958 + }, + { + "epoch": 0.6172732524552282, + "grad_norm": 2.90675934359309, + "learning_rate": 1.0122522786141907e-06, + "loss": 0.481, + "step": 14959 + }, + { + "epoch": 0.6173145167945862, + "grad_norm": 4.969508499962774, + "learning_rate": 1.0120627013839125e-06, + "loss": 0.5719, + "step": 14960 + }, + { + "epoch": 0.6173557811339441, + "grad_norm": 2.194257037371419, + "learning_rate": 1.0118731328694145e-06, + "loss": 0.4743, + "step": 14961 + }, + { + "epoch": 0.617397045473302, + "grad_norm": 2.3097165918450115, + "learning_rate": 1.0116835730740834e-06, + "loss": 0.5092, + "step": 14962 + }, + { + "epoch": 0.6174383098126599, + "grad_norm": 3.1016454435177634, + "learning_rate": 1.0114940220013054e-06, + "loss": 0.5176, + "step": 14963 + }, + { + "epoch": 0.6174795741520178, + "grad_norm": 1.9383291095365687, + "learning_rate": 1.011304479654466e-06, + "loss": 0.5084, + "step": 14964 + }, + { + "epoch": 0.6175208384913757, + "grad_norm": 2.390426009950695, + "learning_rate": 1.0111149460369511e-06, + "loss": 0.4925, + "step": 14965 + }, + { + "epoch": 0.6175621028307337, + "grad_norm": 1.8336725690388662, + "learning_rate": 1.0109254211521465e-06, + "loss": 0.4982, + "step": 14966 + }, + { + "epoch": 0.6176033671700916, + "grad_norm": 2.151787230529819, + "learning_rate": 1.010735905003437e-06, + "loss": 0.4714, + "step": 14967 + }, + { + "epoch": 0.6176446315094495, + "grad_norm": 3.7656279880747405, + "learning_rate": 1.0105463975942086e-06, + "loss": 0.4768, + "step": 14968 + }, + { + "epoch": 0.6176858958488075, + "grad_norm": 2.432340145352766, + "learning_rate": 1.0103568989278456e-06, + "loss": 0.5054, + "step": 14969 + }, + { + "epoch": 0.6177271601881654, + "grad_norm": 12.320640337487475, + "learning_rate": 1.0101674090077338e-06, + "loss": 0.5114, + "step": 14970 + }, + { + "epoch": 0.6177684245275233, + "grad_norm": 2.4886967373976403, + "learning_rate": 1.0099779278372569e-06, + "loss": 0.5868, + "step": 14971 + }, + { + "epoch": 0.6178096888668813, + "grad_norm": 3.1112273114753606, + "learning_rate": 1.0097884554198003e-06, + "loss": 0.4982, + "step": 14972 + }, + { + "epoch": 0.6178509532062392, + "grad_norm": 1.9611299895520162, + "learning_rate": 1.0095989917587487e-06, + "loss": 0.5399, + "step": 14973 + }, + { + "epoch": 0.6178922175455971, + "grad_norm": 2.467423303621019, + "learning_rate": 1.0094095368574852e-06, + "loss": 0.49, + "step": 14974 + }, + { + "epoch": 0.617933481884955, + "grad_norm": 2.7207773784984695, + "learning_rate": 1.0092200907193949e-06, + "loss": 0.5168, + "step": 14975 + }, + { + "epoch": 0.6179747462243129, + "grad_norm": 3.6110463073373658, + "learning_rate": 1.0090306533478615e-06, + "loss": 0.5321, + "step": 14976 + }, + { + "epoch": 0.6180160105636708, + "grad_norm": 3.330017497874921, + "learning_rate": 1.0088412247462689e-06, + "loss": 0.6143, + "step": 14977 + }, + { + "epoch": 0.6180572749030288, + "grad_norm": 2.523844690829992, + "learning_rate": 1.0086518049180002e-06, + "loss": 0.5186, + "step": 14978 + }, + { + "epoch": 0.6180985392423868, + "grad_norm": 2.3129969097435987, + "learning_rate": 1.00846239386644e-06, + "loss": 0.5573, + "step": 14979 + }, 
+ { + "epoch": 0.6181398035817447, + "grad_norm": 2.1383676263725575, + "learning_rate": 1.0082729915949707e-06, + "loss": 0.4717, + "step": 14980 + }, + { + "epoch": 0.6181810679211026, + "grad_norm": 5.73421554750309, + "learning_rate": 1.0080835981069764e-06, + "loss": 0.5298, + "step": 14981 + }, + { + "epoch": 0.6182223322604605, + "grad_norm": 3.3017291657811194, + "learning_rate": 1.007894213405839e-06, + "loss": 0.5811, + "step": 14982 + }, + { + "epoch": 0.6182635965998184, + "grad_norm": 1.989365587456333, + "learning_rate": 1.0077048374949425e-06, + "loss": 0.4939, + "step": 14983 + }, + { + "epoch": 0.6183048609391764, + "grad_norm": 3.0430299686354916, + "learning_rate": 1.0075154703776688e-06, + "loss": 0.5376, + "step": 14984 + }, + { + "epoch": 0.6183461252785343, + "grad_norm": 3.323592766264037, + "learning_rate": 1.0073261120574012e-06, + "loss": 0.48, + "step": 14985 + }, + { + "epoch": 0.6183873896178922, + "grad_norm": 3.7677939858524128, + "learning_rate": 1.0071367625375222e-06, + "loss": 0.4928, + "step": 14986 + }, + { + "epoch": 0.6184286539572501, + "grad_norm": 2.2507427923811982, + "learning_rate": 1.006947421821413e-06, + "loss": 0.5206, + "step": 14987 + }, + { + "epoch": 0.618469918296608, + "grad_norm": 2.2354428670069506, + "learning_rate": 1.006758089912456e-06, + "loss": 0.5323, + "step": 14988 + }, + { + "epoch": 0.618511182635966, + "grad_norm": 2.4912535841770063, + "learning_rate": 1.0065687668140336e-06, + "loss": 0.5079, + "step": 14989 + }, + { + "epoch": 0.618552446975324, + "grad_norm": 3.1232826315823146, + "learning_rate": 1.0063794525295271e-06, + "loss": 0.4861, + "step": 14990 + }, + { + "epoch": 0.6185937113146819, + "grad_norm": 6.906254856556784, + "learning_rate": 1.0061901470623191e-06, + "loss": 0.5475, + "step": 14991 + }, + { + "epoch": 0.6186349756540398, + "grad_norm": 6.466146100025101, + "learning_rate": 1.0060008504157895e-06, + "loss": 0.5689, + "step": 14992 + }, + { + "epoch": 0.6186762399933977, + "grad_norm": 6.76467385455145, + "learning_rate": 1.0058115625933214e-06, + "loss": 0.5744, + "step": 14993 + }, + { + "epoch": 0.6187175043327556, + "grad_norm": 1.9576929630006967, + "learning_rate": 1.0056222835982945e-06, + "loss": 0.5127, + "step": 14994 + }, + { + "epoch": 0.6187587686721135, + "grad_norm": 3.583605415724632, + "learning_rate": 1.0054330134340906e-06, + "loss": 0.4779, + "step": 14995 + }, + { + "epoch": 0.6188000330114715, + "grad_norm": 4.435254072158986, + "learning_rate": 1.00524375210409e-06, + "loss": 0.4844, + "step": 14996 + }, + { + "epoch": 0.6188412973508294, + "grad_norm": 2.366209523943628, + "learning_rate": 1.0050544996116742e-06, + "loss": 0.4826, + "step": 14997 + }, + { + "epoch": 0.6188825616901873, + "grad_norm": 1.9813628566045274, + "learning_rate": 1.0048652559602226e-06, + "loss": 0.5338, + "step": 14998 + }, + { + "epoch": 0.6189238260295453, + "grad_norm": 2.122953989013583, + "learning_rate": 1.004676021153117e-06, + "loss": 0.522, + "step": 14999 + }, + { + "epoch": 0.6189650903689032, + "grad_norm": 2.3867663343128234, + "learning_rate": 1.0044867951937363e-06, + "loss": 0.5498, + "step": 15000 + }, + { + "epoch": 0.6190063547082612, + "grad_norm": 1.9719641064672884, + "learning_rate": 1.0042975780854607e-06, + "loss": 0.5463, + "step": 15001 + }, + { + "epoch": 0.6190476190476191, + "grad_norm": 2.9957350283344164, + "learning_rate": 1.004108369831671e-06, + "loss": 0.5481, + "step": 15002 + }, + { + "epoch": 0.619088883386977, + "grad_norm": 2.2252613121169142, + 
"learning_rate": 1.0039191704357456e-06, + "loss": 0.5417, + "step": 15003 + }, + { + "epoch": 0.6191301477263349, + "grad_norm": 2.8498060328319426, + "learning_rate": 1.0037299799010654e-06, + "loss": 0.4759, + "step": 15004 + }, + { + "epoch": 0.6191714120656928, + "grad_norm": 2.023658232894975, + "learning_rate": 1.0035407982310094e-06, + "loss": 0.4904, + "step": 15005 + }, + { + "epoch": 0.6192126764050507, + "grad_norm": 5.012581639663122, + "learning_rate": 1.0033516254289563e-06, + "loss": 0.5087, + "step": 15006 + }, + { + "epoch": 0.6192539407444086, + "grad_norm": 3.687598834377953, + "learning_rate": 1.0031624614982856e-06, + "loss": 0.4941, + "step": 15007 + }, + { + "epoch": 0.6192952050837666, + "grad_norm": 1.8854442248690566, + "learning_rate": 1.0029733064423762e-06, + "loss": 0.4907, + "step": 15008 + }, + { + "epoch": 0.6193364694231245, + "grad_norm": 3.07162205650486, + "learning_rate": 1.0027841602646073e-06, + "loss": 0.5275, + "step": 15009 + }, + { + "epoch": 0.6193777337624825, + "grad_norm": 2.772142701843592, + "learning_rate": 1.0025950229683565e-06, + "loss": 0.5628, + "step": 15010 + }, + { + "epoch": 0.6194189981018404, + "grad_norm": 1.840608375498319, + "learning_rate": 1.0024058945570033e-06, + "loss": 0.4856, + "step": 15011 + }, + { + "epoch": 0.6194602624411983, + "grad_norm": 5.086099497749026, + "learning_rate": 1.002216775033926e-06, + "loss": 0.5234, + "step": 15012 + }, + { + "epoch": 0.6195015267805563, + "grad_norm": 7.422616456600119, + "learning_rate": 1.002027664402502e-06, + "loss": 0.5651, + "step": 15013 + }, + { + "epoch": 0.6195427911199142, + "grad_norm": 3.7775387655482064, + "learning_rate": 1.0018385626661097e-06, + "loss": 0.4977, + "step": 15014 + }, + { + "epoch": 0.6195840554592721, + "grad_norm": 5.0950254138758275, + "learning_rate": 1.0016494698281266e-06, + "loss": 0.5109, + "step": 15015 + }, + { + "epoch": 0.61962531979863, + "grad_norm": 3.023080804449129, + "learning_rate": 1.0014603858919308e-06, + "loss": 0.5746, + "step": 15016 + }, + { + "epoch": 0.6196665841379879, + "grad_norm": 5.261490067548864, + "learning_rate": 1.0012713108608997e-06, + "loss": 0.5253, + "step": 15017 + }, + { + "epoch": 0.6197078484773458, + "grad_norm": 6.0904922781769715, + "learning_rate": 1.0010822447384107e-06, + "loss": 0.4783, + "step": 15018 + }, + { + "epoch": 0.6197491128167038, + "grad_norm": 2.5058285855958093, + "learning_rate": 1.0008931875278406e-06, + "loss": 0.5387, + "step": 15019 + }, + { + "epoch": 0.6197903771560618, + "grad_norm": 5.275965129557182, + "learning_rate": 1.0007041392325672e-06, + "loss": 0.518, + "step": 15020 + }, + { + "epoch": 0.6198316414954197, + "grad_norm": 2.3201343448492318, + "learning_rate": 1.0005150998559664e-06, + "loss": 0.5313, + "step": 15021 + }, + { + "epoch": 0.6198729058347776, + "grad_norm": 2.51641776226317, + "learning_rate": 1.0003260694014159e-06, + "loss": 0.5431, + "step": 15022 + }, + { + "epoch": 0.6199141701741355, + "grad_norm": 2.5441517978565233, + "learning_rate": 1.0001370478722915e-06, + "loss": 0.5038, + "step": 15023 + }, + { + "epoch": 0.6199554345134934, + "grad_norm": 7.420758929385753, + "learning_rate": 9.9994803527197e-07, + "loss": 0.514, + "step": 15024 + }, + { + "epoch": 0.6199966988528514, + "grad_norm": 8.435057291163792, + "learning_rate": 9.99759031603828e-07, + "loss": 0.5667, + "step": 15025 + }, + { + "epoch": 0.6200379631922093, + "grad_norm": 2.480903585153149, + "learning_rate": 9.995700368712408e-07, + "loss": 0.5019, + "step": 15026 + }, + { + 
"epoch": 0.6200792275315672, + "grad_norm": 2.7599629154896554, + "learning_rate": 9.993810510775843e-07, + "loss": 0.5798, + "step": 15027 + }, + { + "epoch": 0.6201204918709251, + "grad_norm": 2.2401472045126245, + "learning_rate": 9.99192074226235e-07, + "loss": 0.4797, + "step": 15028 + }, + { + "epoch": 0.620161756210283, + "grad_norm": 2.0652120556276357, + "learning_rate": 9.990031063205674e-07, + "loss": 0.5116, + "step": 15029 + }, + { + "epoch": 0.620203020549641, + "grad_norm": 3.307271687822028, + "learning_rate": 9.988141473639582e-07, + "loss": 0.5676, + "step": 15030 + }, + { + "epoch": 0.620244284888999, + "grad_norm": 1.9946041068240796, + "learning_rate": 9.986251973597818e-07, + "loss": 0.5093, + "step": 15031 + }, + { + "epoch": 0.6202855492283569, + "grad_norm": 2.60182021563008, + "learning_rate": 9.984362563114138e-07, + "loss": 0.5238, + "step": 15032 + }, + { + "epoch": 0.6203268135677148, + "grad_norm": 2.1682873509762493, + "learning_rate": 9.98247324222229e-07, + "loss": 0.5195, + "step": 15033 + }, + { + "epoch": 0.6203680779070727, + "grad_norm": 4.198123172887669, + "learning_rate": 9.980584010956023e-07, + "loss": 0.5287, + "step": 15034 + }, + { + "epoch": 0.6204093422464306, + "grad_norm": 1.9894031868212334, + "learning_rate": 9.978694869349078e-07, + "loss": 0.4854, + "step": 15035 + }, + { + "epoch": 0.6204506065857885, + "grad_norm": 2.502377658054239, + "learning_rate": 9.976805817435208e-07, + "loss": 0.5013, + "step": 15036 + }, + { + "epoch": 0.6204918709251465, + "grad_norm": 3.3014346237152363, + "learning_rate": 9.974916855248148e-07, + "loss": 0.5478, + "step": 15037 + }, + { + "epoch": 0.6205331352645044, + "grad_norm": 4.019621696040726, + "learning_rate": 9.973027982821648e-07, + "loss": 0.5584, + "step": 15038 + }, + { + "epoch": 0.6205743996038623, + "grad_norm": 2.4453344832039647, + "learning_rate": 9.971139200189448e-07, + "loss": 0.5369, + "step": 15039 + }, + { + "epoch": 0.6206156639432203, + "grad_norm": 34.57325769632081, + "learning_rate": 9.969250507385276e-07, + "loss": 0.4894, + "step": 15040 + }, + { + "epoch": 0.6206569282825782, + "grad_norm": 20.372630607860188, + "learning_rate": 9.967361904442875e-07, + "loss": 0.5398, + "step": 15041 + }, + { + "epoch": 0.6206981926219362, + "grad_norm": 3.2556424354112714, + "learning_rate": 9.96547339139598e-07, + "loss": 0.5356, + "step": 15042 + }, + { + "epoch": 0.6207394569612941, + "grad_norm": 2.9016740033729107, + "learning_rate": 9.963584968278325e-07, + "loss": 0.5214, + "step": 15043 + }, + { + "epoch": 0.620780721300652, + "grad_norm": 6.319238522308547, + "learning_rate": 9.961696635123644e-07, + "loss": 0.4964, + "step": 15044 + }, + { + "epoch": 0.6208219856400099, + "grad_norm": 2.103249769593124, + "learning_rate": 9.959808391965662e-07, + "loss": 0.5071, + "step": 15045 + }, + { + "epoch": 0.6208632499793678, + "grad_norm": 2.0933516125910807, + "learning_rate": 9.957920238838114e-07, + "loss": 0.5693, + "step": 15046 + }, + { + "epoch": 0.6209045143187257, + "grad_norm": 2.598426255219987, + "learning_rate": 9.956032175774719e-07, + "loss": 0.5287, + "step": 15047 + }, + { + "epoch": 0.6209457786580836, + "grad_norm": 4.111577296813672, + "learning_rate": 9.954144202809213e-07, + "loss": 0.5995, + "step": 15048 + }, + { + "epoch": 0.6209870429974416, + "grad_norm": 1.963728632157423, + "learning_rate": 9.952256319975307e-07, + "loss": 0.4807, + "step": 15049 + }, + { + "epoch": 0.6210283073367996, + "grad_norm": 16.792603790188853, + "learning_rate": 
9.950368527306738e-07, + "loss": 0.492, + "step": 15050 + }, + { + "epoch": 0.6210695716761575, + "grad_norm": 2.9262590857494124, + "learning_rate": 9.948480824837214e-07, + "loss": 0.5487, + "step": 15051 + }, + { + "epoch": 0.6211108360155154, + "grad_norm": 6.9353482473509045, + "learning_rate": 9.946593212600466e-07, + "loss": 0.5051, + "step": 15052 + }, + { + "epoch": 0.6211521003548733, + "grad_norm": 11.040435629896653, + "learning_rate": 9.944705690630203e-07, + "loss": 0.519, + "step": 15053 + }, + { + "epoch": 0.6211933646942313, + "grad_norm": 42.25435967228827, + "learning_rate": 9.942818258960139e-07, + "loss": 0.5399, + "step": 15054 + }, + { + "epoch": 0.6212346290335892, + "grad_norm": 2.926211168500808, + "learning_rate": 9.940930917623995e-07, + "loss": 0.5289, + "step": 15055 + }, + { + "epoch": 0.6212758933729471, + "grad_norm": 3.1106899139510618, + "learning_rate": 9.939043666655475e-07, + "loss": 0.5702, + "step": 15056 + }, + { + "epoch": 0.621317157712305, + "grad_norm": 4.849964393712996, + "learning_rate": 9.937156506088303e-07, + "loss": 0.4927, + "step": 15057 + }, + { + "epoch": 0.6213584220516629, + "grad_norm": 2.622791364747096, + "learning_rate": 9.935269435956176e-07, + "loss": 0.5665, + "step": 15058 + }, + { + "epoch": 0.6213996863910208, + "grad_norm": 2.09738872615899, + "learning_rate": 9.933382456292811e-07, + "loss": 0.5097, + "step": 15059 + }, + { + "epoch": 0.6214409507303789, + "grad_norm": 3.2777827523513188, + "learning_rate": 9.931495567131906e-07, + "loss": 0.4832, + "step": 15060 + }, + { + "epoch": 0.6214822150697368, + "grad_norm": 2.385174714194708, + "learning_rate": 9.929608768507173e-07, + "loss": 0.489, + "step": 15061 + }, + { + "epoch": 0.6215234794090947, + "grad_norm": 2.4057228011464056, + "learning_rate": 9.927722060452307e-07, + "loss": 0.4919, + "step": 15062 + }, + { + "epoch": 0.6215647437484526, + "grad_norm": 4.984564632200719, + "learning_rate": 9.92583544300102e-07, + "loss": 0.5152, + "step": 15063 + }, + { + "epoch": 0.6216060080878105, + "grad_norm": 3.267968867387394, + "learning_rate": 9.923948916187004e-07, + "loss": 0.564, + "step": 15064 + }, + { + "epoch": 0.6216472724271684, + "grad_norm": 2.8257092694258716, + "learning_rate": 9.922062480043961e-07, + "loss": 0.5095, + "step": 15065 + }, + { + "epoch": 0.6216885367665264, + "grad_norm": 8.920557104602436, + "learning_rate": 9.92017613460558e-07, + "loss": 0.5982, + "step": 15066 + }, + { + "epoch": 0.6217298011058843, + "grad_norm": 4.845423542390529, + "learning_rate": 9.918289879905563e-07, + "loss": 0.5519, + "step": 15067 + }, + { + "epoch": 0.6217710654452422, + "grad_norm": 2.6462192878964137, + "learning_rate": 9.916403715977598e-07, + "loss": 0.5187, + "step": 15068 + }, + { + "epoch": 0.6218123297846001, + "grad_norm": 5.050068764596707, + "learning_rate": 9.914517642855385e-07, + "loss": 0.4581, + "step": 15069 + }, + { + "epoch": 0.621853594123958, + "grad_norm": 4.975988334312293, + "learning_rate": 9.912631660572604e-07, + "loss": 0.5381, + "step": 15070 + }, + { + "epoch": 0.621894858463316, + "grad_norm": 4.239708207027042, + "learning_rate": 9.910745769162953e-07, + "loss": 0.4909, + "step": 15071 + }, + { + "epoch": 0.621936122802674, + "grad_norm": 5.290282660597923, + "learning_rate": 9.908859968660109e-07, + "loss": 0.5253, + "step": 15072 + }, + { + "epoch": 0.6219773871420319, + "grad_norm": 4.05704973847922, + "learning_rate": 9.906974259097765e-07, + "loss": 0.5325, + "step": 15073 + }, + { + "epoch": 0.6220186514813898, + 
"grad_norm": 3.3434463797368186, + "learning_rate": 9.9050886405096e-07, + "loss": 0.5121, + "step": 15074 + }, + { + "epoch": 0.6220599158207477, + "grad_norm": 3.002962174585552, + "learning_rate": 9.903203112929302e-07, + "loss": 0.4876, + "step": 15075 + }, + { + "epoch": 0.6221011801601056, + "grad_norm": 2.75528342818528, + "learning_rate": 9.90131767639054e-07, + "loss": 0.4709, + "step": 15076 + }, + { + "epoch": 0.6221424444994635, + "grad_norm": 2.518651986881797, + "learning_rate": 9.899432330927004e-07, + "loss": 0.4826, + "step": 15077 + }, + { + "epoch": 0.6221837088388215, + "grad_norm": 2.7010588433557325, + "learning_rate": 9.89754707657237e-07, + "loss": 0.5372, + "step": 15078 + }, + { + "epoch": 0.6222249731781794, + "grad_norm": 2.7062020287722244, + "learning_rate": 9.895661913360301e-07, + "loss": 0.4872, + "step": 15079 + }, + { + "epoch": 0.6222662375175373, + "grad_norm": 2.525944895116372, + "learning_rate": 9.893776841324484e-07, + "loss": 0.55, + "step": 15080 + }, + { + "epoch": 0.6223075018568953, + "grad_norm": 2.934601055674904, + "learning_rate": 9.891891860498582e-07, + "loss": 0.5504, + "step": 15081 + }, + { + "epoch": 0.6223487661962532, + "grad_norm": 2.159015319705831, + "learning_rate": 9.890006970916274e-07, + "loss": 0.498, + "step": 15082 + }, + { + "epoch": 0.6223900305356111, + "grad_norm": 7.5351199843780945, + "learning_rate": 9.888122172611226e-07, + "loss": 0.5064, + "step": 15083 + }, + { + "epoch": 0.6224312948749691, + "grad_norm": 4.134309726573963, + "learning_rate": 9.8862374656171e-07, + "loss": 0.4817, + "step": 15084 + }, + { + "epoch": 0.622472559214327, + "grad_norm": 4.242107328557505, + "learning_rate": 9.884352849967568e-07, + "loss": 0.516, + "step": 15085 + }, + { + "epoch": 0.6225138235536849, + "grad_norm": 2.141334844485136, + "learning_rate": 9.88246832569629e-07, + "loss": 0.5506, + "step": 15086 + }, + { + "epoch": 0.6225550878930428, + "grad_norm": 26.972580864546753, + "learning_rate": 9.880583892836932e-07, + "loss": 0.5623, + "step": 15087 + }, + { + "epoch": 0.6225963522324007, + "grad_norm": 6.0171605288858405, + "learning_rate": 9.878699551423147e-07, + "loss": 0.519, + "step": 15088 + }, + { + "epoch": 0.6226376165717586, + "grad_norm": 2.6046376684307564, + "learning_rate": 9.876815301488605e-07, + "loss": 0.5067, + "step": 15089 + }, + { + "epoch": 0.6226788809111166, + "grad_norm": 2.3904365530524694, + "learning_rate": 9.874931143066957e-07, + "loss": 0.5259, + "step": 15090 + }, + { + "epoch": 0.6227201452504746, + "grad_norm": 1.9779563951228276, + "learning_rate": 9.873047076191863e-07, + "loss": 0.4669, + "step": 15091 + }, + { + "epoch": 0.6227614095898325, + "grad_norm": 14.888688189311058, + "learning_rate": 9.87116310089697e-07, + "loss": 0.4685, + "step": 15092 + }, + { + "epoch": 0.6228026739291904, + "grad_norm": 3.3938826566717895, + "learning_rate": 9.869279217215935e-07, + "loss": 0.4882, + "step": 15093 + }, + { + "epoch": 0.6228439382685483, + "grad_norm": 6.794539373188288, + "learning_rate": 9.867395425182407e-07, + "loss": 0.5623, + "step": 15094 + }, + { + "epoch": 0.6228852026079063, + "grad_norm": 2.636454382613661, + "learning_rate": 9.865511724830037e-07, + "loss": 0.49, + "step": 15095 + }, + { + "epoch": 0.6229264669472642, + "grad_norm": 1.8167324456424767, + "learning_rate": 9.86362811619247e-07, + "loss": 0.4903, + "step": 15096 + }, + { + "epoch": 0.6229677312866221, + "grad_norm": 2.4420694764392343, + "learning_rate": 9.861744599303359e-07, + "loss": 0.5087, + "step": 15097 + 
}, + { + "epoch": 0.62300899562598, + "grad_norm": 2.274349302927099, + "learning_rate": 9.85986117419634e-07, + "loss": 0.4935, + "step": 15098 + }, + { + "epoch": 0.6230502599653379, + "grad_norm": 4.768228651722546, + "learning_rate": 9.857977840905058e-07, + "loss": 0.4991, + "step": 15099 + }, + { + "epoch": 0.6230915243046958, + "grad_norm": 4.029215197512968, + "learning_rate": 9.856094599463158e-07, + "loss": 0.5362, + "step": 15100 + }, + { + "epoch": 0.6231327886440539, + "grad_norm": 2.3682055442324326, + "learning_rate": 9.854211449904273e-07, + "loss": 0.5367, + "step": 15101 + }, + { + "epoch": 0.6231740529834118, + "grad_norm": 2.7498585021482547, + "learning_rate": 9.852328392262051e-07, + "loss": 0.5009, + "step": 15102 + }, + { + "epoch": 0.6232153173227697, + "grad_norm": 2.863621021697088, + "learning_rate": 9.850445426570118e-07, + "loss": 0.513, + "step": 15103 + }, + { + "epoch": 0.6232565816621276, + "grad_norm": 9.5037707123287, + "learning_rate": 9.848562552862115e-07, + "loss": 0.5276, + "step": 15104 + }, + { + "epoch": 0.6232978460014855, + "grad_norm": 3.214773091542585, + "learning_rate": 9.846679771171667e-07, + "loss": 0.5013, + "step": 15105 + }, + { + "epoch": 0.6233391103408434, + "grad_norm": 2.976723793733278, + "learning_rate": 9.844797081532411e-07, + "loss": 0.5217, + "step": 15106 + }, + { + "epoch": 0.6233803746802014, + "grad_norm": 3.161577731961501, + "learning_rate": 9.842914483977975e-07, + "loss": 0.5563, + "step": 15107 + }, + { + "epoch": 0.6234216390195593, + "grad_norm": 2.3349376504079817, + "learning_rate": 9.84103197854199e-07, + "loss": 0.5196, + "step": 15108 + }, + { + "epoch": 0.6234629033589172, + "grad_norm": 11.161496963095788, + "learning_rate": 9.839149565258074e-07, + "loss": 0.5195, + "step": 15109 + }, + { + "epoch": 0.6235041676982751, + "grad_norm": 2.32216354834235, + "learning_rate": 9.83726724415986e-07, + "loss": 0.515, + "step": 15110 + }, + { + "epoch": 0.6235454320376331, + "grad_norm": 3.08748464834484, + "learning_rate": 9.835385015280969e-07, + "loss": 0.5173, + "step": 15111 + }, + { + "epoch": 0.623586696376991, + "grad_norm": 2.727798765182666, + "learning_rate": 9.83350287865502e-07, + "loss": 0.5176, + "step": 15112 + }, + { + "epoch": 0.623627960716349, + "grad_norm": 4.4926527743333935, + "learning_rate": 9.831620834315637e-07, + "loss": 0.5577, + "step": 15113 + }, + { + "epoch": 0.6236692250557069, + "grad_norm": 3.5908696230710784, + "learning_rate": 9.829738882296434e-07, + "loss": 0.4947, + "step": 15114 + }, + { + "epoch": 0.6237104893950648, + "grad_norm": 2.6320509776519265, + "learning_rate": 9.827857022631026e-07, + "loss": 0.5193, + "step": 15115 + }, + { + "epoch": 0.6237517537344227, + "grad_norm": 3.517568115664706, + "learning_rate": 9.825975255353032e-07, + "loss": 0.5245, + "step": 15116 + }, + { + "epoch": 0.6237930180737806, + "grad_norm": 2.2949977206906245, + "learning_rate": 9.824093580496069e-07, + "loss": 0.5056, + "step": 15117 + }, + { + "epoch": 0.6238342824131385, + "grad_norm": 2.9529237485391704, + "learning_rate": 9.822211998093737e-07, + "loss": 0.5129, + "step": 15118 + }, + { + "epoch": 0.6238755467524965, + "grad_norm": 14.20611779716501, + "learning_rate": 9.820330508179652e-07, + "loss": 0.4588, + "step": 15119 + }, + { + "epoch": 0.6239168110918544, + "grad_norm": 5.0072275212372785, + "learning_rate": 9.818449110787421e-07, + "loss": 0.4952, + "step": 15120 + }, + { + "epoch": 0.6239580754312124, + "grad_norm": 4.440011912850933, + "learning_rate": 
9.816567805950654e-07, + "loss": 0.4856, + "step": 15121 + }, + { + "epoch": 0.6239993397705703, + "grad_norm": 7.04569231460158, + "learning_rate": 9.814686593702952e-07, + "loss": 0.5153, + "step": 15122 + }, + { + "epoch": 0.6240406041099282, + "grad_norm": 1.8815822695438444, + "learning_rate": 9.812805474077916e-07, + "loss": 0.437, + "step": 15123 + }, + { + "epoch": 0.6240818684492861, + "grad_norm": 4.586066966245918, + "learning_rate": 9.810924447109155e-07, + "loss": 0.4853, + "step": 15124 + }, + { + "epoch": 0.6241231327886441, + "grad_norm": 3.2491846241403888, + "learning_rate": 9.80904351283026e-07, + "loss": 0.5335, + "step": 15125 + }, + { + "epoch": 0.624164397128002, + "grad_norm": 2.5337787422773452, + "learning_rate": 9.807162671274836e-07, + "loss": 0.5311, + "step": 15126 + }, + { + "epoch": 0.6242056614673599, + "grad_norm": 2.1294121665582746, + "learning_rate": 9.805281922476476e-07, + "loss": 0.4699, + "step": 15127 + }, + { + "epoch": 0.6242469258067178, + "grad_norm": 2.8258571307566696, + "learning_rate": 9.803401266468778e-07, + "loss": 0.541, + "step": 15128 + }, + { + "epoch": 0.6242881901460757, + "grad_norm": 2.341046154831585, + "learning_rate": 9.80152070328533e-07, + "loss": 0.5103, + "step": 15129 + }, + { + "epoch": 0.6243294544854336, + "grad_norm": 2.980776847240632, + "learning_rate": 9.799640232959732e-07, + "loss": 0.5226, + "step": 15130 + }, + { + "epoch": 0.6243707188247916, + "grad_norm": 2.1576637771671754, + "learning_rate": 9.79775985552557e-07, + "loss": 0.4831, + "step": 15131 + }, + { + "epoch": 0.6244119831641496, + "grad_norm": 2.339896765290376, + "learning_rate": 9.795879571016424e-07, + "loss": 0.5333, + "step": 15132 + }, + { + "epoch": 0.6244532475035075, + "grad_norm": 2.0811886339025945, + "learning_rate": 9.793999379465894e-07, + "loss": 0.4491, + "step": 15133 + }, + { + "epoch": 0.6244945118428654, + "grad_norm": 3.2733030335527697, + "learning_rate": 9.792119280907553e-07, + "loss": 0.5275, + "step": 15134 + }, + { + "epoch": 0.6245357761822233, + "grad_norm": 8.33761246744933, + "learning_rate": 9.79023927537499e-07, + "loss": 0.4873, + "step": 15135 + }, + { + "epoch": 0.6245770405215813, + "grad_norm": 6.6451652593433685, + "learning_rate": 9.788359362901788e-07, + "loss": 0.5795, + "step": 15136 + }, + { + "epoch": 0.6246183048609392, + "grad_norm": 6.378192200491469, + "learning_rate": 9.786479543521526e-07, + "loss": 0.4896, + "step": 15137 + }, + { + "epoch": 0.6246595692002971, + "grad_norm": 2.8135440773657003, + "learning_rate": 9.78459981726778e-07, + "loss": 0.5121, + "step": 15138 + }, + { + "epoch": 0.624700833539655, + "grad_norm": 3.5194048423111806, + "learning_rate": 9.782720184174131e-07, + "loss": 0.5135, + "step": 15139 + }, + { + "epoch": 0.6247420978790129, + "grad_norm": 2.3440851059994037, + "learning_rate": 9.780840644274147e-07, + "loss": 0.5489, + "step": 15140 + }, + { + "epoch": 0.6247833622183708, + "grad_norm": 3.06083632514874, + "learning_rate": 9.77896119760141e-07, + "loss": 0.5033, + "step": 15141 + }, + { + "epoch": 0.6248246265577289, + "grad_norm": 2.4447356316501017, + "learning_rate": 9.777081844189484e-07, + "loss": 0.5372, + "step": 15142 + }, + { + "epoch": 0.6248658908970868, + "grad_norm": 2.7680606556878553, + "learning_rate": 9.775202584071943e-07, + "loss": 0.5789, + "step": 15143 + }, + { + "epoch": 0.6249071552364447, + "grad_norm": 1.9195791952693009, + "learning_rate": 9.773323417282358e-07, + "loss": 0.5163, + "step": 15144 + }, + { + "epoch": 0.6249484195758026, + 
"grad_norm": 3.3562844493311164, + "learning_rate": 9.77144434385429e-07, + "loss": 0.5501, + "step": 15145 + }, + { + "epoch": 0.6249896839151605, + "grad_norm": 2.47919082472624, + "learning_rate": 9.769565363821302e-07, + "loss": 0.4895, + "step": 15146 + }, + { + "epoch": 0.6250309482545184, + "grad_norm": 2.119443408153009, + "learning_rate": 9.767686477216964e-07, + "loss": 0.4891, + "step": 15147 + }, + { + "epoch": 0.6250722125938764, + "grad_norm": 2.9863402782709216, + "learning_rate": 9.765807684074832e-07, + "loss": 0.5507, + "step": 15148 + }, + { + "epoch": 0.6251134769332343, + "grad_norm": 3.6422094314363322, + "learning_rate": 9.76392898442847e-07, + "loss": 0.5281, + "step": 15149 + }, + { + "epoch": 0.6251547412725922, + "grad_norm": 5.279500072346455, + "learning_rate": 9.762050378311435e-07, + "loss": 0.5014, + "step": 15150 + }, + { + "epoch": 0.6251960056119501, + "grad_norm": 10.94216716827, + "learning_rate": 9.760171865757284e-07, + "loss": 0.5097, + "step": 15151 + }, + { + "epoch": 0.6252372699513081, + "grad_norm": 3.0429392945154388, + "learning_rate": 9.758293446799572e-07, + "loss": 0.5845, + "step": 15152 + }, + { + "epoch": 0.625278534290666, + "grad_norm": 7.2592855656374216, + "learning_rate": 9.756415121471852e-07, + "loss": 0.4868, + "step": 15153 + }, + { + "epoch": 0.625319798630024, + "grad_norm": 4.334459171817946, + "learning_rate": 9.754536889807673e-07, + "loss": 0.4646, + "step": 15154 + }, + { + "epoch": 0.6253610629693819, + "grad_norm": 2.475349447683405, + "learning_rate": 9.75265875184059e-07, + "loss": 0.5405, + "step": 15155 + }, + { + "epoch": 0.6254023273087398, + "grad_norm": 2.865812113780111, + "learning_rate": 9.750780707604146e-07, + "loss": 0.4786, + "step": 15156 + }, + { + "epoch": 0.6254435916480977, + "grad_norm": 4.163382575997315, + "learning_rate": 9.748902757131896e-07, + "loss": 0.5581, + "step": 15157 + }, + { + "epoch": 0.6254848559874556, + "grad_norm": 2.9486637976771903, + "learning_rate": 9.747024900457375e-07, + "loss": 0.5346, + "step": 15158 + }, + { + "epoch": 0.6255261203268135, + "grad_norm": 2.4168004887892196, + "learning_rate": 9.745147137614128e-07, + "loss": 0.463, + "step": 15159 + }, + { + "epoch": 0.6255673846661715, + "grad_norm": 2.7012222511116097, + "learning_rate": 9.7432694686357e-07, + "loss": 0.4917, + "step": 15160 + }, + { + "epoch": 0.6256086490055294, + "grad_norm": 5.682103073414395, + "learning_rate": 9.741391893555633e-07, + "loss": 0.4447, + "step": 15161 + }, + { + "epoch": 0.6256499133448874, + "grad_norm": 5.659445338945279, + "learning_rate": 9.739514412407457e-07, + "loss": 0.5195, + "step": 15162 + }, + { + "epoch": 0.6256911776842453, + "grad_norm": 1.9457992248508749, + "learning_rate": 9.737637025224717e-07, + "loss": 0.4471, + "step": 15163 + }, + { + "epoch": 0.6257324420236032, + "grad_norm": 3.509093094628191, + "learning_rate": 9.735759732040941e-07, + "loss": 0.5363, + "step": 15164 + }, + { + "epoch": 0.6257737063629611, + "grad_norm": 2.2154889508595286, + "learning_rate": 9.73388253288967e-07, + "loss": 0.5483, + "step": 15165 + }, + { + "epoch": 0.6258149707023191, + "grad_norm": 3.743003372535132, + "learning_rate": 9.732005427804423e-07, + "loss": 0.5191, + "step": 15166 + }, + { + "epoch": 0.625856235041677, + "grad_norm": 2.599268424103134, + "learning_rate": 9.730128416818746e-07, + "loss": 0.4618, + "step": 15167 + }, + { + "epoch": 0.6258974993810349, + "grad_norm": 1.8104376467185392, + "learning_rate": 9.72825149996615e-07, + "loss": 0.4968, + "step": 15168 
+ }, + { + "epoch": 0.6259387637203928, + "grad_norm": 5.128740390489437, + "learning_rate": 9.72637467728018e-07, + "loss": 0.4959, + "step": 15169 + }, + { + "epoch": 0.6259800280597507, + "grad_norm": 3.987733569572835, + "learning_rate": 9.724497948794347e-07, + "loss": 0.5092, + "step": 15170 + }, + { + "epoch": 0.6260212923991086, + "grad_norm": 3.1606963156392105, + "learning_rate": 9.722621314542178e-07, + "loss": 0.4936, + "step": 15171 + }, + { + "epoch": 0.6260625567384667, + "grad_norm": 3.4748653117359427, + "learning_rate": 9.720744774557192e-07, + "loss": 0.5307, + "step": 15172 + }, + { + "epoch": 0.6261038210778246, + "grad_norm": 2.3311900908581986, + "learning_rate": 9.71886832887291e-07, + "loss": 0.5236, + "step": 15173 + }, + { + "epoch": 0.6261450854171825, + "grad_norm": 3.5024808936452843, + "learning_rate": 9.716991977522854e-07, + "loss": 0.5002, + "step": 15174 + }, + { + "epoch": 0.6261863497565404, + "grad_norm": 3.984778388423885, + "learning_rate": 9.715115720540536e-07, + "loss": 0.5155, + "step": 15175 + }, + { + "epoch": 0.6262276140958983, + "grad_norm": 4.029659731906666, + "learning_rate": 9.713239557959472e-07, + "loss": 0.5786, + "step": 15176 + }, + { + "epoch": 0.6262688784352562, + "grad_norm": 5.850753278538535, + "learning_rate": 9.711363489813176e-07, + "loss": 0.5221, + "step": 15177 + }, + { + "epoch": 0.6263101427746142, + "grad_norm": 2.4905510040505208, + "learning_rate": 9.709487516135157e-07, + "loss": 0.5014, + "step": 15178 + }, + { + "epoch": 0.6263514071139721, + "grad_norm": 5.6236374490755, + "learning_rate": 9.707611636958923e-07, + "loss": 0.5601, + "step": 15179 + }, + { + "epoch": 0.62639267145333, + "grad_norm": 2.0853868076413553, + "learning_rate": 9.705735852317991e-07, + "loss": 0.551, + "step": 15180 + }, + { + "epoch": 0.6264339357926879, + "grad_norm": 7.147040917667524, + "learning_rate": 9.703860162245859e-07, + "loss": 0.4928, + "step": 15181 + }, + { + "epoch": 0.6264752001320459, + "grad_norm": 2.038908858734859, + "learning_rate": 9.70198456677603e-07, + "loss": 0.5235, + "step": 15182 + }, + { + "epoch": 0.6265164644714039, + "grad_norm": 2.9092086345339956, + "learning_rate": 9.700109065942017e-07, + "loss": 0.5717, + "step": 15183 + }, + { + "epoch": 0.6265577288107618, + "grad_norm": 3.3117945451576682, + "learning_rate": 9.698233659777312e-07, + "loss": 0.4973, + "step": 15184 + }, + { + "epoch": 0.6265989931501197, + "grad_norm": 2.391857914151936, + "learning_rate": 9.696358348315413e-07, + "loss": 0.5731, + "step": 15185 + }, + { + "epoch": 0.6266402574894776, + "grad_norm": 1.8730569841432012, + "learning_rate": 9.694483131589824e-07, + "loss": 0.5241, + "step": 15186 + }, + { + "epoch": 0.6266815218288355, + "grad_norm": 1.785430225463371, + "learning_rate": 9.692608009634035e-07, + "loss": 0.5047, + "step": 15187 + }, + { + "epoch": 0.6267227861681934, + "grad_norm": 4.31840718860974, + "learning_rate": 9.69073298248155e-07, + "loss": 0.5007, + "step": 15188 + }, + { + "epoch": 0.6267640505075514, + "grad_norm": 2.9098455278635504, + "learning_rate": 9.68885805016585e-07, + "loss": 0.4891, + "step": 15189 + }, + { + "epoch": 0.6268053148469093, + "grad_norm": 2.0729351473365303, + "learning_rate": 9.686983212720436e-07, + "loss": 0.5362, + "step": 15190 + }, + { + "epoch": 0.6268465791862672, + "grad_norm": 2.4022471766391154, + "learning_rate": 9.68510847017879e-07, + "loss": 0.5043, + "step": 15191 + }, + { + "epoch": 0.6268878435256252, + "grad_norm": 2.8795582724765696, + "learning_rate": 
9.683233822574406e-07, + "loss": 0.5015, + "step": 15192 + }, + { + "epoch": 0.6269291078649831, + "grad_norm": 12.888811353251926, + "learning_rate": 9.681359269940762e-07, + "loss": 0.5261, + "step": 15193 + }, + { + "epoch": 0.626970372204341, + "grad_norm": 3.0859333444055785, + "learning_rate": 9.67948481231135e-07, + "loss": 0.5391, + "step": 15194 + }, + { + "epoch": 0.627011636543699, + "grad_norm": 2.231621851714881, + "learning_rate": 9.677610449719646e-07, + "loss": 0.5188, + "step": 15195 + }, + { + "epoch": 0.6270529008830569, + "grad_norm": 12.571979959139167, + "learning_rate": 9.675736182199138e-07, + "loss": 0.4876, + "step": 15196 + }, + { + "epoch": 0.6270941652224148, + "grad_norm": 2.590196695167873, + "learning_rate": 9.6738620097833e-07, + "loss": 0.5157, + "step": 15197 + }, + { + "epoch": 0.6271354295617727, + "grad_norm": 6.0793306508367255, + "learning_rate": 9.67198793250561e-07, + "loss": 0.5138, + "step": 15198 + }, + { + "epoch": 0.6271766939011306, + "grad_norm": 8.117779159284078, + "learning_rate": 9.670113950399545e-07, + "loss": 0.4698, + "step": 15199 + }, + { + "epoch": 0.6272179582404885, + "grad_norm": 3.4016720125042674, + "learning_rate": 9.668240063498577e-07, + "loss": 0.5247, + "step": 15200 + }, + { + "epoch": 0.6272592225798465, + "grad_norm": 4.484697082764418, + "learning_rate": 9.666366271836179e-07, + "loss": 0.5209, + "step": 15201 + }, + { + "epoch": 0.6273004869192044, + "grad_norm": 2.5357404165626924, + "learning_rate": 9.664492575445823e-07, + "loss": 0.527, + "step": 15202 + }, + { + "epoch": 0.6273417512585624, + "grad_norm": 4.066628618215662, + "learning_rate": 9.662618974360976e-07, + "loss": 0.5262, + "step": 15203 + }, + { + "epoch": 0.6273830155979203, + "grad_norm": 1.9884347692926057, + "learning_rate": 9.660745468615106e-07, + "loss": 0.5057, + "step": 15204 + }, + { + "epoch": 0.6274242799372782, + "grad_norm": 3.083928980097701, + "learning_rate": 9.658872058241677e-07, + "loss": 0.4772, + "step": 15205 + }, + { + "epoch": 0.6274655442766361, + "grad_norm": 6.40090126169938, + "learning_rate": 9.656998743274158e-07, + "loss": 0.5269, + "step": 15206 + }, + { + "epoch": 0.6275068086159941, + "grad_norm": 4.293896342642916, + "learning_rate": 9.655125523746007e-07, + "loss": 0.4929, + "step": 15207 + }, + { + "epoch": 0.627548072955352, + "grad_norm": 2.2310325216525326, + "learning_rate": 9.653252399690684e-07, + "loss": 0.5453, + "step": 15208 + }, + { + "epoch": 0.6275893372947099, + "grad_norm": 3.2672792846202166, + "learning_rate": 9.651379371141654e-07, + "loss": 0.4978, + "step": 15209 + }, + { + "epoch": 0.6276306016340678, + "grad_norm": 2.379699277039059, + "learning_rate": 9.64950643813236e-07, + "loss": 0.5764, + "step": 15210 + }, + { + "epoch": 0.6276718659734257, + "grad_norm": 2.4749051839907166, + "learning_rate": 9.647633600696273e-07, + "loss": 0.5223, + "step": 15211 + }, + { + "epoch": 0.6277131303127836, + "grad_norm": 7.642212644637896, + "learning_rate": 9.645760858866834e-07, + "loss": 0.5434, + "step": 15212 + }, + { + "epoch": 0.6277543946521417, + "grad_norm": 3.175165597488075, + "learning_rate": 9.643888212677501e-07, + "loss": 0.5298, + "step": 15213 + }, + { + "epoch": 0.6277956589914996, + "grad_norm": 26.910944960648553, + "learning_rate": 9.642015662161725e-07, + "loss": 0.5157, + "step": 15214 + }, + { + "epoch": 0.6278369233308575, + "grad_norm": 3.382130884937761, + "learning_rate": 9.640143207352954e-07, + "loss": 0.4424, + "step": 15215 + }, + { + "epoch": 0.6278781876702154, + 
"grad_norm": 3.666669692249316, + "learning_rate": 9.638270848284632e-07, + "loss": 0.536, + "step": 15216 + }, + { + "epoch": 0.6279194520095733, + "grad_norm": 2.683499132172328, + "learning_rate": 9.636398584990205e-07, + "loss": 0.4945, + "step": 15217 + }, + { + "epoch": 0.6279607163489312, + "grad_norm": 13.631775209734009, + "learning_rate": 9.634526417503118e-07, + "loss": 0.5101, + "step": 15218 + }, + { + "epoch": 0.6280019806882892, + "grad_norm": 9.299203747588642, + "learning_rate": 9.63265434585681e-07, + "loss": 0.5187, + "step": 15219 + }, + { + "epoch": 0.6280432450276471, + "grad_norm": 3.296453374798799, + "learning_rate": 9.630782370084727e-07, + "loss": 0.5134, + "step": 15220 + }, + { + "epoch": 0.628084509367005, + "grad_norm": 3.0310183770474657, + "learning_rate": 9.628910490220298e-07, + "loss": 0.5544, + "step": 15221 + }, + { + "epoch": 0.6281257737063629, + "grad_norm": 2.1633319626479017, + "learning_rate": 9.627038706296968e-07, + "loss": 0.534, + "step": 15222 + }, + { + "epoch": 0.6281670380457209, + "grad_norm": 3.743616298074096, + "learning_rate": 9.625167018348168e-07, + "loss": 0.5106, + "step": 15223 + }, + { + "epoch": 0.6282083023850789, + "grad_norm": 4.158486590816607, + "learning_rate": 9.623295426407327e-07, + "loss": 0.4936, + "step": 15224 + }, + { + "epoch": 0.6282495667244368, + "grad_norm": 3.0267960105163154, + "learning_rate": 9.62142393050788e-07, + "loss": 0.5021, + "step": 15225 + }, + { + "epoch": 0.6282908310637947, + "grad_norm": 13.526567379004522, + "learning_rate": 9.619552530683258e-07, + "loss": 0.5117, + "step": 15226 + }, + { + "epoch": 0.6283320954031526, + "grad_norm": 2.1431327522961667, + "learning_rate": 9.61768122696689e-07, + "loss": 0.4429, + "step": 15227 + }, + { + "epoch": 0.6283733597425105, + "grad_norm": 2.3444692154263853, + "learning_rate": 9.615810019392194e-07, + "loss": 0.4607, + "step": 15228 + }, + { + "epoch": 0.6284146240818684, + "grad_norm": 13.411120575139758, + "learning_rate": 9.613938907992604e-07, + "loss": 0.5173, + "step": 15229 + }, + { + "epoch": 0.6284558884212263, + "grad_norm": 3.1186457434076074, + "learning_rate": 9.612067892801538e-07, + "loss": 0.5271, + "step": 15230 + }, + { + "epoch": 0.6284971527605843, + "grad_norm": 1.866238563690154, + "learning_rate": 9.61019697385242e-07, + "loss": 0.5195, + "step": 15231 + }, + { + "epoch": 0.6285384170999422, + "grad_norm": 2.123291178016708, + "learning_rate": 9.608326151178663e-07, + "loss": 0.4909, + "step": 15232 + }, + { + "epoch": 0.6285796814393002, + "grad_norm": 2.866154465524848, + "learning_rate": 9.606455424813695e-07, + "loss": 0.459, + "step": 15233 + }, + { + "epoch": 0.6286209457786581, + "grad_norm": 3.655571224662606, + "learning_rate": 9.60458479479092e-07, + "loss": 0.4653, + "step": 15234 + }, + { + "epoch": 0.628662210118016, + "grad_norm": 4.458227875725716, + "learning_rate": 9.602714261143762e-07, + "loss": 0.5829, + "step": 15235 + }, + { + "epoch": 0.628703474457374, + "grad_norm": 3.0639362371946013, + "learning_rate": 9.60084382390563e-07, + "loss": 0.5208, + "step": 15236 + }, + { + "epoch": 0.6287447387967319, + "grad_norm": 3.94097092521194, + "learning_rate": 9.598973483109933e-07, + "loss": 0.5379, + "step": 15237 + }, + { + "epoch": 0.6287860031360898, + "grad_norm": 2.942535145993828, + "learning_rate": 9.59710323879008e-07, + "loss": 0.5298, + "step": 15238 + }, + { + "epoch": 0.6288272674754477, + "grad_norm": 3.313121681109949, + "learning_rate": 9.595233090979478e-07, + "loss": 0.5162, + "step": 15239 
+ }, + { + "epoch": 0.6288685318148056, + "grad_norm": 2.727001071563949, + "learning_rate": 9.593363039711533e-07, + "loss": 0.5288, + "step": 15240 + }, + { + "epoch": 0.6289097961541635, + "grad_norm": 2.073396605585177, + "learning_rate": 9.59149308501965e-07, + "loss": 0.5172, + "step": 15241 + }, + { + "epoch": 0.6289510604935215, + "grad_norm": 2.349587278575623, + "learning_rate": 9.589623226937231e-07, + "loss": 0.5538, + "step": 15242 + }, + { + "epoch": 0.6289923248328795, + "grad_norm": 2.862097642691863, + "learning_rate": 9.587753465497678e-07, + "loss": 0.4939, + "step": 15243 + }, + { + "epoch": 0.6290335891722374, + "grad_norm": 14.19328947426031, + "learning_rate": 9.58588380073438e-07, + "loss": 0.5206, + "step": 15244 + }, + { + "epoch": 0.6290748535115953, + "grad_norm": 3.4534456715208863, + "learning_rate": 9.584014232680751e-07, + "loss": 0.5763, + "step": 15245 + }, + { + "epoch": 0.6291161178509532, + "grad_norm": 2.194017392071742, + "learning_rate": 9.582144761370169e-07, + "loss": 0.4927, + "step": 15246 + }, + { + "epoch": 0.6291573821903111, + "grad_norm": 4.555869103857851, + "learning_rate": 9.58027538683604e-07, + "loss": 0.5299, + "step": 15247 + }, + { + "epoch": 0.6291986465296691, + "grad_norm": 2.144810492247738, + "learning_rate": 9.578406109111746e-07, + "loss": 0.4742, + "step": 15248 + }, + { + "epoch": 0.629239910869027, + "grad_norm": 5.561531582322826, + "learning_rate": 9.57653692823069e-07, + "loss": 0.5448, + "step": 15249 + }, + { + "epoch": 0.6292811752083849, + "grad_norm": 2.5696597280297198, + "learning_rate": 9.574667844226247e-07, + "loss": 0.5306, + "step": 15250 + }, + { + "epoch": 0.6293224395477428, + "grad_norm": 2.5364627631367473, + "learning_rate": 9.572798857131807e-07, + "loss": 0.5205, + "step": 15251 + }, + { + "epoch": 0.6293637038871007, + "grad_norm": 2.147060911458704, + "learning_rate": 9.570929966980758e-07, + "loss": 0.5273, + "step": 15252 + }, + { + "epoch": 0.6294049682264587, + "grad_norm": 2.18539960526037, + "learning_rate": 9.569061173806479e-07, + "loss": 0.5109, + "step": 15253 + }, + { + "epoch": 0.6294462325658167, + "grad_norm": 4.793315008610616, + "learning_rate": 9.567192477642356e-07, + "loss": 0.5623, + "step": 15254 + }, + { + "epoch": 0.6294874969051746, + "grad_norm": 2.099870465460474, + "learning_rate": 9.56532387852176e-07, + "loss": 0.4695, + "step": 15255 + }, + { + "epoch": 0.6295287612445325, + "grad_norm": 2.8879764308030618, + "learning_rate": 9.563455376478084e-07, + "loss": 0.5289, + "step": 15256 + }, + { + "epoch": 0.6295700255838904, + "grad_norm": 3.636307211358106, + "learning_rate": 9.56158697154469e-07, + "loss": 0.538, + "step": 15257 + }, + { + "epoch": 0.6296112899232483, + "grad_norm": 3.1488075581640693, + "learning_rate": 9.55971866375496e-07, + "loss": 0.5571, + "step": 15258 + }, + { + "epoch": 0.6296525542626062, + "grad_norm": 19.029874579109187, + "learning_rate": 9.557850453142262e-07, + "loss": 0.5452, + "step": 15259 + }, + { + "epoch": 0.6296938186019642, + "grad_norm": 2.3255506875693572, + "learning_rate": 9.555982339739969e-07, + "loss": 0.5331, + "step": 15260 + }, + { + "epoch": 0.6297350829413221, + "grad_norm": 142.31651065817502, + "learning_rate": 9.554114323581452e-07, + "loss": 0.5363, + "step": 15261 + }, + { + "epoch": 0.62977634728068, + "grad_norm": 3.1782575952302294, + "learning_rate": 9.552246404700082e-07, + "loss": 0.5458, + "step": 15262 + }, + { + "epoch": 0.6298176116200379, + "grad_norm": 5.202715065649903, + "learning_rate": 
9.55037858312921e-07, + "loss": 0.5724, + "step": 15263 + }, + { + "epoch": 0.6298588759593959, + "grad_norm": 2.958516901216882, + "learning_rate": 9.548510858902213e-07, + "loss": 0.4703, + "step": 15264 + }, + { + "epoch": 0.6299001402987539, + "grad_norm": 2.118359986129671, + "learning_rate": 9.546643232052448e-07, + "loss": 0.5413, + "step": 15265 + }, + { + "epoch": 0.6299414046381118, + "grad_norm": 2.8688876930501563, + "learning_rate": 9.54477570261328e-07, + "loss": 0.5181, + "step": 15266 + }, + { + "epoch": 0.6299826689774697, + "grad_norm": 3.2253778561922735, + "learning_rate": 9.54290827061806e-07, + "loss": 0.5085, + "step": 15267 + }, + { + "epoch": 0.6300239333168276, + "grad_norm": 2.56961277657908, + "learning_rate": 9.541040936100156e-07, + "loss": 0.4755, + "step": 15268 + }, + { + "epoch": 0.6300651976561855, + "grad_norm": 3.382256293195236, + "learning_rate": 9.53917369909291e-07, + "loss": 0.4715, + "step": 15269 + }, + { + "epoch": 0.6301064619955434, + "grad_norm": 3.3728837072635867, + "learning_rate": 9.53730655962969e-07, + "loss": 0.5415, + "step": 15270 + }, + { + "epoch": 0.6301477263349013, + "grad_norm": 9.288337053238243, + "learning_rate": 9.535439517743834e-07, + "loss": 0.5011, + "step": 15271 + }, + { + "epoch": 0.6301889906742593, + "grad_norm": 3.8230669347158024, + "learning_rate": 9.533572573468704e-07, + "loss": 0.4863, + "step": 15272 + }, + { + "epoch": 0.6302302550136172, + "grad_norm": 2.243247742282079, + "learning_rate": 9.531705726837639e-07, + "loss": 0.504, + "step": 15273 + }, + { + "epoch": 0.6302715193529752, + "grad_norm": 3.5941298567356212, + "learning_rate": 9.52983897788399e-07, + "loss": 0.5533, + "step": 15274 + }, + { + "epoch": 0.6303127836923331, + "grad_norm": 2.365738793303904, + "learning_rate": 9.527972326641106e-07, + "loss": 0.5074, + "step": 15275 + }, + { + "epoch": 0.630354048031691, + "grad_norm": 3.3978719773413477, + "learning_rate": 9.526105773142319e-07, + "loss": 0.5099, + "step": 15276 + }, + { + "epoch": 0.630395312371049, + "grad_norm": 2.751637112642229, + "learning_rate": 9.52423931742098e-07, + "loss": 0.548, + "step": 15277 + }, + { + "epoch": 0.6304365767104069, + "grad_norm": 2.0825943177222297, + "learning_rate": 9.522372959510426e-07, + "loss": 0.5547, + "step": 15278 + }, + { + "epoch": 0.6304778410497648, + "grad_norm": 4.89791524911968, + "learning_rate": 9.520506699443987e-07, + "loss": 0.5599, + "step": 15279 + }, + { + "epoch": 0.6305191053891227, + "grad_norm": 2.337544999250815, + "learning_rate": 9.518640537255013e-07, + "loss": 0.4817, + "step": 15280 + }, + { + "epoch": 0.6305603697284806, + "grad_norm": 2.306789192762277, + "learning_rate": 9.516774472976826e-07, + "loss": 0.5195, + "step": 15281 + }, + { + "epoch": 0.6306016340678385, + "grad_norm": 4.104185889587525, + "learning_rate": 9.514908506642768e-07, + "loss": 0.6023, + "step": 15282 + }, + { + "epoch": 0.6306428984071965, + "grad_norm": 3.1437633929765942, + "learning_rate": 9.513042638286162e-07, + "loss": 0.5621, + "step": 15283 + }, + { + "epoch": 0.6306841627465545, + "grad_norm": 10.451432568892645, + "learning_rate": 9.511176867940344e-07, + "loss": 0.4675, + "step": 15284 + }, + { + "epoch": 0.6307254270859124, + "grad_norm": 9.490274759739721, + "learning_rate": 9.509311195638636e-07, + "loss": 0.5652, + "step": 15285 + }, + { + "epoch": 0.6307666914252703, + "grad_norm": 2.133008453139024, + "learning_rate": 9.507445621414366e-07, + "loss": 0.5464, + "step": 15286 + }, + { + "epoch": 0.6308079557646282, + 
"grad_norm": 3.321079148290403, + "learning_rate": 9.505580145300857e-07, + "loss": 0.4577, + "step": 15287 + }, + { + "epoch": 0.6308492201039861, + "grad_norm": 4.525415299926728, + "learning_rate": 9.503714767331437e-07, + "loss": 0.5141, + "step": 15288 + }, + { + "epoch": 0.6308904844433441, + "grad_norm": 2.173146126174901, + "learning_rate": 9.501849487539418e-07, + "loss": 0.4946, + "step": 15289 + }, + { + "epoch": 0.630931748782702, + "grad_norm": 3.833128561418864, + "learning_rate": 9.499984305958117e-07, + "loss": 0.4536, + "step": 15290 + }, + { + "epoch": 0.6309730131220599, + "grad_norm": 7.830508811182268, + "learning_rate": 9.498119222620859e-07, + "loss": 0.5697, + "step": 15291 + }, + { + "epoch": 0.6310142774614178, + "grad_norm": 15.92894917444242, + "learning_rate": 9.496254237560953e-07, + "loss": 0.4888, + "step": 15292 + }, + { + "epoch": 0.6310555418007757, + "grad_norm": 3.2583092736382526, + "learning_rate": 9.494389350811715e-07, + "loss": 0.53, + "step": 15293 + }, + { + "epoch": 0.6310968061401337, + "grad_norm": 4.087644994719474, + "learning_rate": 9.492524562406456e-07, + "loss": 0.4802, + "step": 15294 + }, + { + "epoch": 0.6311380704794917, + "grad_norm": 2.9327645331092245, + "learning_rate": 9.490659872378488e-07, + "loss": 0.4903, + "step": 15295 + }, + { + "epoch": 0.6311793348188496, + "grad_norm": 8.877870541705684, + "learning_rate": 9.488795280761113e-07, + "loss": 0.5491, + "step": 15296 + }, + { + "epoch": 0.6312205991582075, + "grad_norm": 4.808489479376212, + "learning_rate": 9.486930787587647e-07, + "loss": 0.576, + "step": 15297 + }, + { + "epoch": 0.6312618634975654, + "grad_norm": 10.535426345215868, + "learning_rate": 9.485066392891386e-07, + "loss": 0.5154, + "step": 15298 + }, + { + "epoch": 0.6313031278369233, + "grad_norm": 36.708986832183136, + "learning_rate": 9.483202096705634e-07, + "loss": 0.4703, + "step": 15299 + }, + { + "epoch": 0.6313443921762812, + "grad_norm": 7.268747118326475, + "learning_rate": 9.481337899063697e-07, + "loss": 0.4751, + "step": 15300 + }, + { + "epoch": 0.6313856565156392, + "grad_norm": 2.124048896902972, + "learning_rate": 9.479473799998872e-07, + "loss": 0.5444, + "step": 15301 + }, + { + "epoch": 0.6314269208549971, + "grad_norm": 3.4519592365742344, + "learning_rate": 9.477609799544448e-07, + "loss": 0.4971, + "step": 15302 + }, + { + "epoch": 0.631468185194355, + "grad_norm": 3.7761434993845473, + "learning_rate": 9.475745897733735e-07, + "loss": 0.5043, + "step": 15303 + }, + { + "epoch": 0.631509449533713, + "grad_norm": 2.1850619176197124, + "learning_rate": 9.473882094600013e-07, + "loss": 0.5095, + "step": 15304 + }, + { + "epoch": 0.6315507138730709, + "grad_norm": 9.387734085955225, + "learning_rate": 9.472018390176584e-07, + "loss": 0.5236, + "step": 15305 + }, + { + "epoch": 0.6315919782124289, + "grad_norm": 3.907585640045998, + "learning_rate": 9.470154784496736e-07, + "loss": 0.4481, + "step": 15306 + }, + { + "epoch": 0.6316332425517868, + "grad_norm": 2.291006921145527, + "learning_rate": 9.468291277593758e-07, + "loss": 0.4953, + "step": 15307 + }, + { + "epoch": 0.6316745068911447, + "grad_norm": 2.6488639396320166, + "learning_rate": 9.466427869500933e-07, + "loss": 0.592, + "step": 15308 + }, + { + "epoch": 0.6317157712305026, + "grad_norm": 6.9133404378212875, + "learning_rate": 9.464564560251552e-07, + "loss": 0.499, + "step": 15309 + }, + { + "epoch": 0.6317570355698605, + "grad_norm": 18.912528136186342, + "learning_rate": 9.462701349878893e-07, + "loss": 0.5365, + "step": 
15310 + }, + { + "epoch": 0.6317982999092184, + "grad_norm": 3.585227370834632, + "learning_rate": 9.460838238416243e-07, + "loss": 0.4973, + "step": 15311 + }, + { + "epoch": 0.6318395642485763, + "grad_norm": 8.721042824387437, + "learning_rate": 9.458975225896875e-07, + "loss": 0.564, + "step": 15312 + }, + { + "epoch": 0.6318808285879343, + "grad_norm": 3.6745549268859117, + "learning_rate": 9.457112312354075e-07, + "loss": 0.5247, + "step": 15313 + }, + { + "epoch": 0.6319220929272923, + "grad_norm": 3.6052574306130065, + "learning_rate": 9.455249497821111e-07, + "loss": 0.5194, + "step": 15314 + }, + { + "epoch": 0.6319633572666502, + "grad_norm": 2.4887436558896203, + "learning_rate": 9.453386782331269e-07, + "loss": 0.5202, + "step": 15315 + }, + { + "epoch": 0.6320046216060081, + "grad_norm": 6.794762436915105, + "learning_rate": 9.451524165917812e-07, + "loss": 0.4847, + "step": 15316 + }, + { + "epoch": 0.632045885945366, + "grad_norm": 2.638476294371422, + "learning_rate": 9.449661648614013e-07, + "loss": 0.561, + "step": 15317 + }, + { + "epoch": 0.632087150284724, + "grad_norm": 2.6192452672178623, + "learning_rate": 9.447799230453138e-07, + "loss": 0.4885, + "step": 15318 + }, + { + "epoch": 0.6321284146240819, + "grad_norm": 2.28211644866217, + "learning_rate": 9.445936911468462e-07, + "loss": 0.4928, + "step": 15319 + }, + { + "epoch": 0.6321696789634398, + "grad_norm": 4.3369254378345445, + "learning_rate": 9.444074691693243e-07, + "loss": 0.4918, + "step": 15320 + }, + { + "epoch": 0.6322109433027977, + "grad_norm": 2.032001753616385, + "learning_rate": 9.442212571160753e-07, + "loss": 0.501, + "step": 15321 + }, + { + "epoch": 0.6322522076421556, + "grad_norm": 2.2850399267423684, + "learning_rate": 9.440350549904246e-07, + "loss": 0.5592, + "step": 15322 + }, + { + "epoch": 0.6322934719815135, + "grad_norm": 2.1756948981812867, + "learning_rate": 9.438488627956989e-07, + "loss": 0.5077, + "step": 15323 + }, + { + "epoch": 0.6323347363208714, + "grad_norm": 2.9479211243262204, + "learning_rate": 9.436626805352237e-07, + "loss": 0.5656, + "step": 15324 + }, + { + "epoch": 0.6323760006602295, + "grad_norm": 3.5035421612050404, + "learning_rate": 9.434765082123249e-07, + "loss": 0.5696, + "step": 15325 + }, + { + "epoch": 0.6324172649995874, + "grad_norm": 8.163437879715751, + "learning_rate": 9.432903458303277e-07, + "loss": 0.5074, + "step": 15326 + }, + { + "epoch": 0.6324585293389453, + "grad_norm": 2.506835597794846, + "learning_rate": 9.431041933925576e-07, + "loss": 0.5148, + "step": 15327 + }, + { + "epoch": 0.6324997936783032, + "grad_norm": 3.354660949730467, + "learning_rate": 9.429180509023403e-07, + "loss": 0.525, + "step": 15328 + }, + { + "epoch": 0.6325410580176611, + "grad_norm": 4.282023385527909, + "learning_rate": 9.427319183629996e-07, + "loss": 0.5002, + "step": 15329 + }, + { + "epoch": 0.6325823223570191, + "grad_norm": 2.1028791965195848, + "learning_rate": 9.425457957778612e-07, + "loss": 0.4964, + "step": 15330 + }, + { + "epoch": 0.632623586696377, + "grad_norm": 2.1238652835314733, + "learning_rate": 9.423596831502491e-07, + "loss": 0.4836, + "step": 15331 + }, + { + "epoch": 0.6326648510357349, + "grad_norm": 2.6503418795918874, + "learning_rate": 9.421735804834884e-07, + "loss": 0.4743, + "step": 15332 + }, + { + "epoch": 0.6327061153750928, + "grad_norm": 3.023677634337719, + "learning_rate": 9.419874877809027e-07, + "loss": 0.5267, + "step": 15333 + }, + { + "epoch": 0.6327473797144507, + "grad_norm": 3.0448420288620945, + "learning_rate": 
9.418014050458166e-07, + "loss": 0.5475, + "step": 15334 + }, + { + "epoch": 0.6327886440538087, + "grad_norm": 1.8773672472353455, + "learning_rate": 9.416153322815536e-07, + "loss": 0.53, + "step": 15335 + }, + { + "epoch": 0.6328299083931667, + "grad_norm": 5.247145541683917, + "learning_rate": 9.414292694914378e-07, + "loss": 0.4524, + "step": 15336 + }, + { + "epoch": 0.6328711727325246, + "grad_norm": 2.4736374873715916, + "learning_rate": 9.412432166787929e-07, + "loss": 0.5283, + "step": 15337 + }, + { + "epoch": 0.6329124370718825, + "grad_norm": 2.509494676840792, + "learning_rate": 9.410571738469414e-07, + "loss": 0.5493, + "step": 15338 + }, + { + "epoch": 0.6329537014112404, + "grad_norm": 3.3681559024698537, + "learning_rate": 9.408711409992073e-07, + "loss": 0.4771, + "step": 15339 + }, + { + "epoch": 0.6329949657505983, + "grad_norm": 6.782354183381767, + "learning_rate": 9.406851181389133e-07, + "loss": 0.5534, + "step": 15340 + }, + { + "epoch": 0.6330362300899562, + "grad_norm": 7.123205813323592, + "learning_rate": 9.404991052693826e-07, + "loss": 0.5415, + "step": 15341 + }, + { + "epoch": 0.6330774944293142, + "grad_norm": 2.245455670485683, + "learning_rate": 9.403131023939372e-07, + "loss": 0.5072, + "step": 15342 + }, + { + "epoch": 0.6331187587686721, + "grad_norm": 2.1323570577992483, + "learning_rate": 9.401271095158999e-07, + "loss": 0.4775, + "step": 15343 + }, + { + "epoch": 0.63316002310803, + "grad_norm": 4.79830087613838, + "learning_rate": 9.399411266385931e-07, + "loss": 0.5305, + "step": 15344 + }, + { + "epoch": 0.633201287447388, + "grad_norm": 8.356925395294935, + "learning_rate": 9.397551537653386e-07, + "loss": 0.4874, + "step": 15345 + }, + { + "epoch": 0.6332425517867459, + "grad_norm": 5.830351538279345, + "learning_rate": 9.395691908994589e-07, + "loss": 0.5296, + "step": 15346 + }, + { + "epoch": 0.6332838161261038, + "grad_norm": 2.7343133935085535, + "learning_rate": 9.393832380442751e-07, + "loss": 0.5135, + "step": 15347 + }, + { + "epoch": 0.6333250804654618, + "grad_norm": 7.202954225796125, + "learning_rate": 9.391972952031094e-07, + "loss": 0.4879, + "step": 15348 + }, + { + "epoch": 0.6333663448048197, + "grad_norm": 2.778269735038953, + "learning_rate": 9.390113623792827e-07, + "loss": 0.5598, + "step": 15349 + }, + { + "epoch": 0.6334076091441776, + "grad_norm": 4.177418812136313, + "learning_rate": 9.388254395761165e-07, + "loss": 0.5131, + "step": 15350 + }, + { + "epoch": 0.6334488734835355, + "grad_norm": 2.820784532180767, + "learning_rate": 9.386395267969318e-07, + "loss": 0.5565, + "step": 15351 + }, + { + "epoch": 0.6334901378228934, + "grad_norm": 2.0269981239619934, + "learning_rate": 9.384536240450497e-07, + "loss": 0.5513, + "step": 15352 + }, + { + "epoch": 0.6335314021622513, + "grad_norm": 2.463748003633449, + "learning_rate": 9.382677313237902e-07, + "loss": 0.5179, + "step": 15353 + }, + { + "epoch": 0.6335726665016093, + "grad_norm": 2.4216271672026606, + "learning_rate": 9.38081848636475e-07, + "loss": 0.4757, + "step": 15354 + }, + { + "epoch": 0.6336139308409673, + "grad_norm": 1.978225116936367, + "learning_rate": 9.378959759864233e-07, + "loss": 0.5375, + "step": 15355 + }, + { + "epoch": 0.6336551951803252, + "grad_norm": 2.9557885770916927, + "learning_rate": 9.377101133769557e-07, + "loss": 0.5123, + "step": 15356 + }, + { + "epoch": 0.6336964595196831, + "grad_norm": 2.0873811257302117, + "learning_rate": 9.375242608113919e-07, + "loss": 0.4852, + "step": 15357 + }, + { + "epoch": 0.633737723859041, + 
"grad_norm": 2.46911556850758, + "learning_rate": 9.373384182930519e-07, + "loss": 0.4847, + "step": 15358 + }, + { + "epoch": 0.633778988198399, + "grad_norm": 2.5894811963738236, + "learning_rate": 9.371525858252554e-07, + "loss": 0.4747, + "step": 15359 + }, + { + "epoch": 0.6338202525377569, + "grad_norm": 2.462158898726986, + "learning_rate": 9.369667634113216e-07, + "loss": 0.5209, + "step": 15360 + }, + { + "epoch": 0.6338615168771148, + "grad_norm": 3.0682486449241577, + "learning_rate": 9.367809510545699e-07, + "loss": 0.5234, + "step": 15361 + }, + { + "epoch": 0.6339027812164727, + "grad_norm": 2.066724647467778, + "learning_rate": 9.365951487583196e-07, + "loss": 0.5377, + "step": 15362 + }, + { + "epoch": 0.6339440455558306, + "grad_norm": 1.695900141173882, + "learning_rate": 9.364093565258892e-07, + "loss": 0.5321, + "step": 15363 + }, + { + "epoch": 0.6339853098951885, + "grad_norm": 3.2644718635564756, + "learning_rate": 9.362235743605976e-07, + "loss": 0.4732, + "step": 15364 + }, + { + "epoch": 0.6340265742345466, + "grad_norm": 2.208814740542126, + "learning_rate": 9.360378022657632e-07, + "loss": 0.5258, + "step": 15365 + }, + { + "epoch": 0.6340678385739045, + "grad_norm": 2.5929498378658016, + "learning_rate": 9.358520402447047e-07, + "loss": 0.4919, + "step": 15366 + }, + { + "epoch": 0.6341091029132624, + "grad_norm": 3.638911503050248, + "learning_rate": 9.356662883007404e-07, + "loss": 0.492, + "step": 15367 + }, + { + "epoch": 0.6341503672526203, + "grad_norm": 3.0444871754836265, + "learning_rate": 9.354805464371873e-07, + "loss": 0.4733, + "step": 15368 + }, + { + "epoch": 0.6341916315919782, + "grad_norm": 4.815074978725637, + "learning_rate": 9.352948146573644e-07, + "loss": 0.557, + "step": 15369 + }, + { + "epoch": 0.6342328959313361, + "grad_norm": 2.172615805640582, + "learning_rate": 9.351090929645881e-07, + "loss": 0.5393, + "step": 15370 + }, + { + "epoch": 0.634274160270694, + "grad_norm": 2.4131743878159275, + "learning_rate": 9.34923381362177e-07, + "loss": 0.5315, + "step": 15371 + }, + { + "epoch": 0.634315424610052, + "grad_norm": 2.3144419983450764, + "learning_rate": 9.347376798534478e-07, + "loss": 0.5079, + "step": 15372 + }, + { + "epoch": 0.6343566889494099, + "grad_norm": 5.983219682177635, + "learning_rate": 9.345519884417176e-07, + "loss": 0.5187, + "step": 15373 + }, + { + "epoch": 0.6343979532887678, + "grad_norm": 3.5618628112099895, + "learning_rate": 9.343663071303035e-07, + "loss": 0.5324, + "step": 15374 + }, + { + "epoch": 0.6344392176281258, + "grad_norm": 10.983107334324947, + "learning_rate": 9.341806359225223e-07, + "loss": 0.4711, + "step": 15375 + }, + { + "epoch": 0.6344804819674837, + "grad_norm": 4.551610345003678, + "learning_rate": 9.339949748216903e-07, + "loss": 0.4917, + "step": 15376 + }, + { + "epoch": 0.6345217463068417, + "grad_norm": 5.496605243801979, + "learning_rate": 9.338093238311238e-07, + "loss": 0.5068, + "step": 15377 + }, + { + "epoch": 0.6345630106461996, + "grad_norm": 2.008969246346087, + "learning_rate": 9.336236829541394e-07, + "loss": 0.5144, + "step": 15378 + }, + { + "epoch": 0.6346042749855575, + "grad_norm": 6.2898595890564275, + "learning_rate": 9.334380521940525e-07, + "loss": 0.496, + "step": 15379 + }, + { + "epoch": 0.6346455393249154, + "grad_norm": 2.892743435342146, + "learning_rate": 9.332524315541801e-07, + "loss": 0.5588, + "step": 15380 + }, + { + "epoch": 0.6346868036642733, + "grad_norm": 3.0949970832220726, + "learning_rate": 9.330668210378368e-07, + "loss": 0.4636, + "step": 
15381 + }, + { + "epoch": 0.6347280680036312, + "grad_norm": 2.2776185743884065, + "learning_rate": 9.328812206483378e-07, + "loss": 0.553, + "step": 15382 + }, + { + "epoch": 0.6347693323429892, + "grad_norm": 2.1596021618913506, + "learning_rate": 9.326956303889995e-07, + "loss": 0.5558, + "step": 15383 + }, + { + "epoch": 0.6348105966823471, + "grad_norm": 5.5031687821324935, + "learning_rate": 9.32510050263136e-07, + "loss": 0.4733, + "step": 15384 + }, + { + "epoch": 0.634851861021705, + "grad_norm": 1.9261891774533475, + "learning_rate": 9.323244802740628e-07, + "loss": 0.4482, + "step": 15385 + }, + { + "epoch": 0.634893125361063, + "grad_norm": 4.657327660850465, + "learning_rate": 9.321389204250943e-07, + "loss": 0.5274, + "step": 15386 + }, + { + "epoch": 0.6349343897004209, + "grad_norm": 2.2042782319528573, + "learning_rate": 9.319533707195455e-07, + "loss": 0.4911, + "step": 15387 + }, + { + "epoch": 0.6349756540397788, + "grad_norm": 2.631035936046962, + "learning_rate": 9.317678311607302e-07, + "loss": 0.4812, + "step": 15388 + }, + { + "epoch": 0.6350169183791368, + "grad_norm": 11.292749723881025, + "learning_rate": 9.315823017519632e-07, + "loss": 0.5431, + "step": 15389 + }, + { + "epoch": 0.6350581827184947, + "grad_norm": 4.793171461633909, + "learning_rate": 9.313967824965582e-07, + "loss": 0.5248, + "step": 15390 + }, + { + "epoch": 0.6350994470578526, + "grad_norm": 3.744583961867034, + "learning_rate": 9.312112733978293e-07, + "loss": 0.4974, + "step": 15391 + }, + { + "epoch": 0.6351407113972105, + "grad_norm": 2.3752508518856232, + "learning_rate": 9.310257744590894e-07, + "loss": 0.5447, + "step": 15392 + }, + { + "epoch": 0.6351819757365684, + "grad_norm": 3.088211180686742, + "learning_rate": 9.308402856836534e-07, + "loss": 0.4898, + "step": 15393 + }, + { + "epoch": 0.6352232400759263, + "grad_norm": 4.678511495136012, + "learning_rate": 9.306548070748334e-07, + "loss": 0.5184, + "step": 15394 + }, + { + "epoch": 0.6352645044152843, + "grad_norm": 2.7597308912593945, + "learning_rate": 9.304693386359428e-07, + "loss": 0.5513, + "step": 15395 + }, + { + "epoch": 0.6353057687546423, + "grad_norm": 7.4288205649117875, + "learning_rate": 9.302838803702941e-07, + "loss": 0.5089, + "step": 15396 + }, + { + "epoch": 0.6353470330940002, + "grad_norm": 2.615637971900542, + "learning_rate": 9.300984322812011e-07, + "loss": 0.4796, + "step": 15397 + }, + { + "epoch": 0.6353882974333581, + "grad_norm": 3.8010425429686228, + "learning_rate": 9.299129943719753e-07, + "loss": 0.5095, + "step": 15398 + }, + { + "epoch": 0.635429561772716, + "grad_norm": 8.849436642463047, + "learning_rate": 9.297275666459299e-07, + "loss": 0.5032, + "step": 15399 + }, + { + "epoch": 0.635470826112074, + "grad_norm": 2.6411014756161424, + "learning_rate": 9.295421491063764e-07, + "loss": 0.5127, + "step": 15400 + }, + { + "epoch": 0.6355120904514319, + "grad_norm": 2.3255899583941377, + "learning_rate": 9.293567417566276e-07, + "loss": 0.5212, + "step": 15401 + }, + { + "epoch": 0.6355533547907898, + "grad_norm": 11.47904047572323, + "learning_rate": 9.291713445999945e-07, + "loss": 0.5504, + "step": 15402 + }, + { + "epoch": 0.6355946191301477, + "grad_norm": 2.4335994931133675, + "learning_rate": 9.289859576397893e-07, + "loss": 0.5006, + "step": 15403 + }, + { + "epoch": 0.6356358834695056, + "grad_norm": 2.415928538783424, + "learning_rate": 9.288005808793234e-07, + "loss": 0.5006, + "step": 15404 + }, + { + "epoch": 0.6356771478088635, + "grad_norm": 11.443697013163396, + 
"learning_rate": 9.286152143219083e-07, + "loss": 0.5243, + "step": 15405 + }, + { + "epoch": 0.6357184121482216, + "grad_norm": 2.1128733662728236, + "learning_rate": 9.284298579708544e-07, + "loss": 0.5295, + "step": 15406 + }, + { + "epoch": 0.6357596764875795, + "grad_norm": 3.4687654098428813, + "learning_rate": 9.282445118294738e-07, + "loss": 0.5106, + "step": 15407 + }, + { + "epoch": 0.6358009408269374, + "grad_norm": 2.4011851508452566, + "learning_rate": 9.280591759010762e-07, + "loss": 0.568, + "step": 15408 + }, + { + "epoch": 0.6358422051662953, + "grad_norm": 3.1384945526382153, + "learning_rate": 9.278738501889722e-07, + "loss": 0.5655, + "step": 15409 + }, + { + "epoch": 0.6358834695056532, + "grad_norm": 3.007757066577148, + "learning_rate": 9.276885346964729e-07, + "loss": 0.4797, + "step": 15410 + }, + { + "epoch": 0.6359247338450111, + "grad_norm": 2.7934041750749317, + "learning_rate": 9.275032294268875e-07, + "loss": 0.4724, + "step": 15411 + }, + { + "epoch": 0.635965998184369, + "grad_norm": 1.9065184167442781, + "learning_rate": 9.27317934383527e-07, + "loss": 0.5658, + "step": 15412 + }, + { + "epoch": 0.636007262523727, + "grad_norm": 21.223939513099626, + "learning_rate": 9.271326495697006e-07, + "loss": 0.5252, + "step": 15413 + }, + { + "epoch": 0.6360485268630849, + "grad_norm": 2.359276378001383, + "learning_rate": 9.269473749887186e-07, + "loss": 0.4632, + "step": 15414 + }, + { + "epoch": 0.6360897912024428, + "grad_norm": 2.528727083684421, + "learning_rate": 9.267621106438896e-07, + "loss": 0.5347, + "step": 15415 + }, + { + "epoch": 0.6361310555418008, + "grad_norm": 2.376910871786557, + "learning_rate": 9.265768565385233e-07, + "loss": 0.4566, + "step": 15416 + }, + { + "epoch": 0.6361723198811587, + "grad_norm": 2.7560600811806566, + "learning_rate": 9.263916126759293e-07, + "loss": 0.5681, + "step": 15417 + }, + { + "epoch": 0.6362135842205167, + "grad_norm": 3.8429919009640714, + "learning_rate": 9.262063790594154e-07, + "loss": 0.495, + "step": 15418 + }, + { + "epoch": 0.6362548485598746, + "grad_norm": 2.1135563954248098, + "learning_rate": 9.260211556922917e-07, + "loss": 0.5018, + "step": 15419 + }, + { + "epoch": 0.6362961128992325, + "grad_norm": 2.333930184092805, + "learning_rate": 9.258359425778659e-07, + "loss": 0.5117, + "step": 15420 + }, + { + "epoch": 0.6363373772385904, + "grad_norm": 3.3752975030263124, + "learning_rate": 9.25650739719446e-07, + "loss": 0.5135, + "step": 15421 + }, + { + "epoch": 0.6363786415779483, + "grad_norm": 1.7102032659073905, + "learning_rate": 9.254655471203412e-07, + "loss": 0.4983, + "step": 15422 + }, + { + "epoch": 0.6364199059173062, + "grad_norm": 2.465840541748301, + "learning_rate": 9.252803647838587e-07, + "loss": 0.4957, + "step": 15423 + }, + { + "epoch": 0.6364611702566642, + "grad_norm": 4.591920267709842, + "learning_rate": 9.250951927133066e-07, + "loss": 0.4917, + "step": 15424 + }, + { + "epoch": 0.6365024345960221, + "grad_norm": 3.2397663883763648, + "learning_rate": 9.249100309119924e-07, + "loss": 0.4977, + "step": 15425 + }, + { + "epoch": 0.6365436989353801, + "grad_norm": 3.078841025779219, + "learning_rate": 9.24724879383224e-07, + "loss": 0.5059, + "step": 15426 + }, + { + "epoch": 0.636584963274738, + "grad_norm": 5.1240397256158925, + "learning_rate": 9.245397381303083e-07, + "loss": 0.519, + "step": 15427 + }, + { + "epoch": 0.6366262276140959, + "grad_norm": 4.957521713836837, + "learning_rate": 9.243546071565525e-07, + "loss": 0.525, + "step": 15428 + }, + { + "epoch": 
0.6366674919534538, + "grad_norm": 5.5248253862463805, + "learning_rate": 9.241694864652633e-07, + "loss": 0.5386, + "step": 15429 + }, + { + "epoch": 0.6367087562928118, + "grad_norm": 2.8390778943131982, + "learning_rate": 9.239843760597477e-07, + "loss": 0.5732, + "step": 15430 + }, + { + "epoch": 0.6367500206321697, + "grad_norm": 3.9404973623030477, + "learning_rate": 9.23799275943312e-07, + "loss": 0.5106, + "step": 15431 + }, + { + "epoch": 0.6367912849715276, + "grad_norm": 4.806012658276774, + "learning_rate": 9.236141861192628e-07, + "loss": 0.5087, + "step": 15432 + }, + { + "epoch": 0.6368325493108855, + "grad_norm": 5.411630097087978, + "learning_rate": 9.234291065909067e-07, + "loss": 0.5208, + "step": 15433 + }, + { + "epoch": 0.6368738136502434, + "grad_norm": 2.7262595748184615, + "learning_rate": 9.232440373615486e-07, + "loss": 0.5542, + "step": 15434 + }, + { + "epoch": 0.6369150779896013, + "grad_norm": 5.658245249246879, + "learning_rate": 9.230589784344945e-07, + "loss": 0.5489, + "step": 15435 + }, + { + "epoch": 0.6369563423289594, + "grad_norm": 2.1985344061887773, + "learning_rate": 9.228739298130508e-07, + "loss": 0.5319, + "step": 15436 + }, + { + "epoch": 0.6369976066683173, + "grad_norm": 4.453849441576486, + "learning_rate": 9.22688891500522e-07, + "loss": 0.5303, + "step": 15437 + }, + { + "epoch": 0.6370388710076752, + "grad_norm": 9.949909242313739, + "learning_rate": 9.225038635002141e-07, + "loss": 0.5333, + "step": 15438 + }, + { + "epoch": 0.6370801353470331, + "grad_norm": 2.4235145969764753, + "learning_rate": 9.223188458154315e-07, + "loss": 0.5092, + "step": 15439 + }, + { + "epoch": 0.637121399686391, + "grad_norm": 2.3966702448253034, + "learning_rate": 9.2213383844948e-07, + "loss": 0.5182, + "step": 15440 + }, + { + "epoch": 0.637162664025749, + "grad_norm": 2.695391621286107, + "learning_rate": 9.219488414056633e-07, + "loss": 0.5189, + "step": 15441 + }, + { + "epoch": 0.6372039283651069, + "grad_norm": 3.661972226449829, + "learning_rate": 9.217638546872867e-07, + "loss": 0.563, + "step": 15442 + }, + { + "epoch": 0.6372451927044648, + "grad_norm": 4.2827018429347765, + "learning_rate": 9.215788782976538e-07, + "loss": 0.5112, + "step": 15443 + }, + { + "epoch": 0.6372864570438227, + "grad_norm": 3.727454377308406, + "learning_rate": 9.213939122400696e-07, + "loss": 0.5214, + "step": 15444 + }, + { + "epoch": 0.6373277213831806, + "grad_norm": 2.3540084863128374, + "learning_rate": 9.21208956517837e-07, + "loss": 0.4938, + "step": 15445 + }, + { + "epoch": 0.6373689857225385, + "grad_norm": 2.0376461679635742, + "learning_rate": 9.210240111342612e-07, + "loss": 0.5441, + "step": 15446 + }, + { + "epoch": 0.6374102500618966, + "grad_norm": 4.435392326143729, + "learning_rate": 9.208390760926445e-07, + "loss": 0.534, + "step": 15447 + }, + { + "epoch": 0.6374515144012545, + "grad_norm": 2.747757704855218, + "learning_rate": 9.206541513962907e-07, + "loss": 0.5484, + "step": 15448 + }, + { + "epoch": 0.6374927787406124, + "grad_norm": 4.585525738586002, + "learning_rate": 9.20469237048503e-07, + "loss": 0.4987, + "step": 15449 + }, + { + "epoch": 0.6375340430799703, + "grad_norm": 2.4243489300882253, + "learning_rate": 9.202843330525842e-07, + "loss": 0.5336, + "step": 15450 + }, + { + "epoch": 0.6375753074193282, + "grad_norm": 3.8069734110517017, + "learning_rate": 9.200994394118381e-07, + "loss": 0.5145, + "step": 15451 + }, + { + "epoch": 0.6376165717586861, + "grad_norm": 4.653065439947778, + "learning_rate": 9.19914556129566e-07, + 
"loss": 0.535, + "step": 15452 + }, + { + "epoch": 0.637657836098044, + "grad_norm": 2.423078114765091, + "learning_rate": 9.197296832090717e-07, + "loss": 0.5363, + "step": 15453 + }, + { + "epoch": 0.637699100437402, + "grad_norm": 2.972133428189724, + "learning_rate": 9.195448206536565e-07, + "loss": 0.4815, + "step": 15454 + }, + { + "epoch": 0.6377403647767599, + "grad_norm": 3.928247511792991, + "learning_rate": 9.193599684666228e-07, + "loss": 0.522, + "step": 15455 + }, + { + "epoch": 0.6377816291161178, + "grad_norm": 1.8629050026621028, + "learning_rate": 9.19175126651273e-07, + "loss": 0.5177, + "step": 15456 + }, + { + "epoch": 0.6378228934554758, + "grad_norm": 2.626950473111384, + "learning_rate": 9.189902952109081e-07, + "loss": 0.5395, + "step": 15457 + }, + { + "epoch": 0.6378641577948337, + "grad_norm": 4.669738816346196, + "learning_rate": 9.188054741488302e-07, + "loss": 0.5274, + "step": 15458 + }, + { + "epoch": 0.6379054221341917, + "grad_norm": 3.0085996944346216, + "learning_rate": 9.186206634683408e-07, + "loss": 0.5272, + "step": 15459 + }, + { + "epoch": 0.6379466864735496, + "grad_norm": 2.5182060945465747, + "learning_rate": 9.1843586317274e-07, + "loss": 0.4996, + "step": 15460 + }, + { + "epoch": 0.6379879508129075, + "grad_norm": 3.2334921695205763, + "learning_rate": 9.1825107326533e-07, + "loss": 0.5689, + "step": 15461 + }, + { + "epoch": 0.6380292151522654, + "grad_norm": 2.474068269707772, + "learning_rate": 9.180662937494109e-07, + "loss": 0.4985, + "step": 15462 + }, + { + "epoch": 0.6380704794916233, + "grad_norm": 1.9269900981734573, + "learning_rate": 9.178815246282837e-07, + "loss": 0.4792, + "step": 15463 + }, + { + "epoch": 0.6381117438309812, + "grad_norm": 6.094166995406067, + "learning_rate": 9.176967659052485e-07, + "loss": 0.4911, + "step": 15464 + }, + { + "epoch": 0.6381530081703392, + "grad_norm": 5.759098996772151, + "learning_rate": 9.175120175836061e-07, + "loss": 0.5425, + "step": 15465 + }, + { + "epoch": 0.6381942725096971, + "grad_norm": 2.8500196367139368, + "learning_rate": 9.173272796666559e-07, + "loss": 0.4889, + "step": 15466 + }, + { + "epoch": 0.6382355368490551, + "grad_norm": 3.8077551553999105, + "learning_rate": 9.171425521576983e-07, + "loss": 0.5395, + "step": 15467 + }, + { + "epoch": 0.638276801188413, + "grad_norm": 4.925077246085672, + "learning_rate": 9.169578350600327e-07, + "loss": 0.5028, + "step": 15468 + }, + { + "epoch": 0.6383180655277709, + "grad_norm": 2.5797981506174117, + "learning_rate": 9.167731283769589e-07, + "loss": 0.4786, + "step": 15469 + }, + { + "epoch": 0.6383593298671288, + "grad_norm": 1.9195820352621142, + "learning_rate": 9.165884321117756e-07, + "loss": 0.568, + "step": 15470 + }, + { + "epoch": 0.6384005942064868, + "grad_norm": 2.4704001998959737, + "learning_rate": 9.164037462677829e-07, + "loss": 0.5348, + "step": 15471 + }, + { + "epoch": 0.6384418585458447, + "grad_norm": 2.4468247823584273, + "learning_rate": 9.162190708482794e-07, + "loss": 0.5192, + "step": 15472 + }, + { + "epoch": 0.6384831228852026, + "grad_norm": 2.8287077363989614, + "learning_rate": 9.160344058565635e-07, + "loss": 0.4852, + "step": 15473 + }, + { + "epoch": 0.6385243872245605, + "grad_norm": 4.486940336588422, + "learning_rate": 9.158497512959335e-07, + "loss": 0.5464, + "step": 15474 + }, + { + "epoch": 0.6385656515639184, + "grad_norm": 3.6436573926463924, + "learning_rate": 9.156651071696887e-07, + "loss": 0.5675, + "step": 15475 + }, + { + "epoch": 0.6386069159032763, + "grad_norm": 
4.906551101960597, + "learning_rate": 9.154804734811266e-07, + "loss": 0.4719, + "step": 15476 + }, + { + "epoch": 0.6386481802426344, + "grad_norm": 2.5951600063218, + "learning_rate": 9.152958502335456e-07, + "loss": 0.4809, + "step": 15477 + }, + { + "epoch": 0.6386894445819923, + "grad_norm": 3.061401607827275, + "learning_rate": 9.151112374302431e-07, + "loss": 0.4401, + "step": 15478 + }, + { + "epoch": 0.6387307089213502, + "grad_norm": 3.516189448928537, + "learning_rate": 9.149266350745178e-07, + "loss": 0.5399, + "step": 15479 + }, + { + "epoch": 0.6387719732607081, + "grad_norm": 12.666851461459126, + "learning_rate": 9.147420431696657e-07, + "loss": 0.5702, + "step": 15480 + }, + { + "epoch": 0.638813237600066, + "grad_norm": 7.428366647286494, + "learning_rate": 9.145574617189852e-07, + "loss": 0.5407, + "step": 15481 + }, + { + "epoch": 0.638854501939424, + "grad_norm": 2.4659481538905084, + "learning_rate": 9.143728907257728e-07, + "loss": 0.5326, + "step": 15482 + }, + { + "epoch": 0.6388957662787819, + "grad_norm": 7.218803269214397, + "learning_rate": 9.141883301933257e-07, + "loss": 0.5072, + "step": 15483 + }, + { + "epoch": 0.6389370306181398, + "grad_norm": 4.08525491009395, + "learning_rate": 9.140037801249403e-07, + "loss": 0.4928, + "step": 15484 + }, + { + "epoch": 0.6389782949574977, + "grad_norm": 1.9532352152917931, + "learning_rate": 9.138192405239141e-07, + "loss": 0.477, + "step": 15485 + }, + { + "epoch": 0.6390195592968556, + "grad_norm": 1.7263184921270065, + "learning_rate": 9.136347113935421e-07, + "loss": 0.519, + "step": 15486 + }, + { + "epoch": 0.6390608236362136, + "grad_norm": 4.876760159606005, + "learning_rate": 9.134501927371208e-07, + "loss": 0.495, + "step": 15487 + }, + { + "epoch": 0.6391020879755716, + "grad_norm": 2.183569611862123, + "learning_rate": 9.132656845579468e-07, + "loss": 0.4877, + "step": 15488 + }, + { + "epoch": 0.6391433523149295, + "grad_norm": 3.661592915242556, + "learning_rate": 9.13081186859315e-07, + "loss": 0.5313, + "step": 15489 + }, + { + "epoch": 0.6391846166542874, + "grad_norm": 2.730740677406065, + "learning_rate": 9.128966996445218e-07, + "loss": 0.5304, + "step": 15490 + }, + { + "epoch": 0.6392258809936453, + "grad_norm": 2.4277390250667192, + "learning_rate": 9.127122229168619e-07, + "loss": 0.5596, + "step": 15491 + }, + { + "epoch": 0.6392671453330032, + "grad_norm": 6.655360172079906, + "learning_rate": 9.125277566796312e-07, + "loss": 0.4829, + "step": 15492 + }, + { + "epoch": 0.6393084096723611, + "grad_norm": 2.9197230209014604, + "learning_rate": 9.123433009361245e-07, + "loss": 0.5063, + "step": 15493 + }, + { + "epoch": 0.639349674011719, + "grad_norm": 2.2318949121185696, + "learning_rate": 9.121588556896361e-07, + "loss": 0.5663, + "step": 15494 + }, + { + "epoch": 0.639390938351077, + "grad_norm": 2.611672299600496, + "learning_rate": 9.119744209434613e-07, + "loss": 0.4956, + "step": 15495 + }, + { + "epoch": 0.6394322026904349, + "grad_norm": 2.2193822172544597, + "learning_rate": 9.117899967008943e-07, + "loss": 0.5605, + "step": 15496 + }, + { + "epoch": 0.6394734670297929, + "grad_norm": 4.254805623035326, + "learning_rate": 9.116055829652296e-07, + "loss": 0.5162, + "step": 15497 + }, + { + "epoch": 0.6395147313691508, + "grad_norm": 2.672810689437584, + "learning_rate": 9.114211797397609e-07, + "loss": 0.4532, + "step": 15498 + }, + { + "epoch": 0.6395559957085087, + "grad_norm": 3.9094298899933353, + "learning_rate": 9.112367870277829e-07, + "loss": 0.4419, + "step": 15499 + }, + { + 
"epoch": 0.6395972600478667, + "grad_norm": 2.7179503538246546, + "learning_rate": 9.110524048325884e-07, + "loss": 0.4892, + "step": 15500 + }, + { + "epoch": 0.6396385243872246, + "grad_norm": 3.0893358212044455, + "learning_rate": 9.10868033157471e-07, + "loss": 0.4839, + "step": 15501 + }, + { + "epoch": 0.6396797887265825, + "grad_norm": 4.114327147236271, + "learning_rate": 9.106836720057247e-07, + "loss": 0.5532, + "step": 15502 + }, + { + "epoch": 0.6397210530659404, + "grad_norm": 1.963230617193323, + "learning_rate": 9.104993213806418e-07, + "loss": 0.4824, + "step": 15503 + }, + { + "epoch": 0.6397623174052983, + "grad_norm": 2.9303489497611004, + "learning_rate": 9.103149812855162e-07, + "loss": 0.5267, + "step": 15504 + }, + { + "epoch": 0.6398035817446562, + "grad_norm": 2.643008346422492, + "learning_rate": 9.101306517236396e-07, + "loss": 0.5408, + "step": 15505 + }, + { + "epoch": 0.6398448460840142, + "grad_norm": 4.513992844164053, + "learning_rate": 9.099463326983057e-07, + "loss": 0.5651, + "step": 15506 + }, + { + "epoch": 0.6398861104233721, + "grad_norm": 1.9269283100683583, + "learning_rate": 9.097620242128062e-07, + "loss": 0.5518, + "step": 15507 + }, + { + "epoch": 0.6399273747627301, + "grad_norm": 1.6779729861127168, + "learning_rate": 9.095777262704337e-07, + "loss": 0.5073, + "step": 15508 + }, + { + "epoch": 0.639968639102088, + "grad_norm": 2.3160748497931545, + "learning_rate": 9.093934388744796e-07, + "loss": 0.5753, + "step": 15509 + }, + { + "epoch": 0.6400099034414459, + "grad_norm": 2.0332639099508567, + "learning_rate": 9.092091620282366e-07, + "loss": 0.5502, + "step": 15510 + }, + { + "epoch": 0.6400511677808038, + "grad_norm": 3.4570152021943343, + "learning_rate": 9.090248957349954e-07, + "loss": 0.5391, + "step": 15511 + }, + { + "epoch": 0.6400924321201618, + "grad_norm": 2.7506458201139945, + "learning_rate": 9.08840639998049e-07, + "loss": 0.4601, + "step": 15512 + }, + { + "epoch": 0.6401336964595197, + "grad_norm": 3.9155507228194857, + "learning_rate": 9.086563948206868e-07, + "loss": 0.5503, + "step": 15513 + }, + { + "epoch": 0.6401749607988776, + "grad_norm": 2.9229653127682576, + "learning_rate": 9.084721602062009e-07, + "loss": 0.4807, + "step": 15514 + }, + { + "epoch": 0.6402162251382355, + "grad_norm": 2.7704572794786397, + "learning_rate": 9.082879361578817e-07, + "loss": 0.5147, + "step": 15515 + }, + { + "epoch": 0.6402574894775934, + "grad_norm": 2.5569610690299074, + "learning_rate": 9.081037226790205e-07, + "loss": 0.5268, + "step": 15516 + }, + { + "epoch": 0.6402987538169513, + "grad_norm": 10.793366106709199, + "learning_rate": 9.079195197729073e-07, + "loss": 0.5298, + "step": 15517 + }, + { + "epoch": 0.6403400181563094, + "grad_norm": 2.6788030968206393, + "learning_rate": 9.077353274428327e-07, + "loss": 0.5703, + "step": 15518 + }, + { + "epoch": 0.6403812824956673, + "grad_norm": 3.405024504557331, + "learning_rate": 9.075511456920867e-07, + "loss": 0.4873, + "step": 15519 + }, + { + "epoch": 0.6404225468350252, + "grad_norm": 6.111662066227481, + "learning_rate": 9.073669745239595e-07, + "loss": 0.5254, + "step": 15520 + }, + { + "epoch": 0.6404638111743831, + "grad_norm": 2.126976306913666, + "learning_rate": 9.071828139417405e-07, + "loss": 0.5152, + "step": 15521 + }, + { + "epoch": 0.640505075513741, + "grad_norm": 2.7389010431406535, + "learning_rate": 9.069986639487197e-07, + "loss": 0.4899, + "step": 15522 + }, + { + "epoch": 0.640546339853099, + "grad_norm": 3.134287757278277, + "learning_rate": 
9.06814524548186e-07, + "loss": 0.5628, + "step": 15523 + }, + { + "epoch": 0.6405876041924569, + "grad_norm": 3.2921903744392207, + "learning_rate": 9.066303957434288e-07, + "loss": 0.5498, + "step": 15524 + }, + { + "epoch": 0.6406288685318148, + "grad_norm": 3.091611685986033, + "learning_rate": 9.064462775377377e-07, + "loss": 0.4764, + "step": 15525 + }, + { + "epoch": 0.6406701328711727, + "grad_norm": 4.141282107139417, + "learning_rate": 9.062621699344002e-07, + "loss": 0.5097, + "step": 15526 + }, + { + "epoch": 0.6407113972105306, + "grad_norm": 4.48136189063252, + "learning_rate": 9.060780729367061e-07, + "loss": 0.4636, + "step": 15527 + }, + { + "epoch": 0.6407526615498886, + "grad_norm": 3.3192219267443757, + "learning_rate": 9.058939865479428e-07, + "loss": 0.5657, + "step": 15528 + }, + { + "epoch": 0.6407939258892466, + "grad_norm": 2.713530546356724, + "learning_rate": 9.057099107713997e-07, + "loss": 0.5005, + "step": 15529 + }, + { + "epoch": 0.6408351902286045, + "grad_norm": 5.2405045926481915, + "learning_rate": 9.055258456103638e-07, + "loss": 0.5003, + "step": 15530 + }, + { + "epoch": 0.6408764545679624, + "grad_norm": 4.220072477148121, + "learning_rate": 9.05341791068124e-07, + "loss": 0.5288, + "step": 15531 + }, + { + "epoch": 0.6409177189073203, + "grad_norm": 3.071440925394878, + "learning_rate": 9.051577471479671e-07, + "loss": 0.4901, + "step": 15532 + }, + { + "epoch": 0.6409589832466782, + "grad_norm": 4.292273709870756, + "learning_rate": 9.049737138531808e-07, + "loss": 0.544, + "step": 15533 + }, + { + "epoch": 0.6410002475860361, + "grad_norm": 2.940956386873527, + "learning_rate": 9.047896911870527e-07, + "loss": 0.4737, + "step": 15534 + }, + { + "epoch": 0.641041511925394, + "grad_norm": 3.9456478692721464, + "learning_rate": 9.046056791528695e-07, + "loss": 0.5902, + "step": 15535 + }, + { + "epoch": 0.641082776264752, + "grad_norm": 2.618499461054502, + "learning_rate": 9.044216777539186e-07, + "loss": 0.4799, + "step": 15536 + }, + { + "epoch": 0.6411240406041099, + "grad_norm": 2.4894788122947795, + "learning_rate": 9.04237686993486e-07, + "loss": 0.5303, + "step": 15537 + }, + { + "epoch": 0.6411653049434679, + "grad_norm": 2.915275039114705, + "learning_rate": 9.040537068748594e-07, + "loss": 0.5512, + "step": 15538 + }, + { + "epoch": 0.6412065692828258, + "grad_norm": 2.632063320191113, + "learning_rate": 9.038697374013243e-07, + "loss": 0.5552, + "step": 15539 + }, + { + "epoch": 0.6412478336221837, + "grad_norm": 2.5324636074437867, + "learning_rate": 9.036857785761663e-07, + "loss": 0.4957, + "step": 15540 + }, + { + "epoch": 0.6412890979615417, + "grad_norm": 2.7118677577318215, + "learning_rate": 9.035018304026726e-07, + "loss": 0.5022, + "step": 15541 + }, + { + "epoch": 0.6413303623008996, + "grad_norm": 4.863359138773667, + "learning_rate": 9.033178928841281e-07, + "loss": 0.4994, + "step": 15542 + }, + { + "epoch": 0.6413716266402575, + "grad_norm": 2.5737000012836266, + "learning_rate": 9.03133966023819e-07, + "loss": 0.5213, + "step": 15543 + }, + { + "epoch": 0.6414128909796154, + "grad_norm": 2.362387483204983, + "learning_rate": 9.029500498250299e-07, + "loss": 0.5787, + "step": 15544 + }, + { + "epoch": 0.6414541553189733, + "grad_norm": 1.835518174911525, + "learning_rate": 9.027661442910472e-07, + "loss": 0.4838, + "step": 15545 + }, + { + "epoch": 0.6414954196583312, + "grad_norm": 5.265148372241062, + "learning_rate": 9.025822494251546e-07, + "loss": 0.5376, + "step": 15546 + }, + { + "epoch": 0.6415366839976891, + 
"grad_norm": 3.2031079186199443, + "learning_rate": 9.023983652306379e-07, + "loss": 0.5347, + "step": 15547 + }, + { + "epoch": 0.6415779483370472, + "grad_norm": 2.8122504270029127, + "learning_rate": 9.02214491710781e-07, + "loss": 0.523, + "step": 15548 + }, + { + "epoch": 0.6416192126764051, + "grad_norm": 1.9332533587728535, + "learning_rate": 9.020306288688691e-07, + "loss": 0.5043, + "step": 15549 + }, + { + "epoch": 0.641660477015763, + "grad_norm": 6.115608703557748, + "learning_rate": 9.018467767081859e-07, + "loss": 0.5919, + "step": 15550 + }, + { + "epoch": 0.6417017413551209, + "grad_norm": 2.9898856815717534, + "learning_rate": 9.016629352320162e-07, + "loss": 0.5468, + "step": 15551 + }, + { + "epoch": 0.6417430056944788, + "grad_norm": 1.9153665707194096, + "learning_rate": 9.014791044436426e-07, + "loss": 0.5463, + "step": 15552 + }, + { + "epoch": 0.6417842700338368, + "grad_norm": 4.770825895516918, + "learning_rate": 9.012952843463498e-07, + "loss": 0.5018, + "step": 15553 + }, + { + "epoch": 0.6418255343731947, + "grad_norm": 3.0050393785920932, + "learning_rate": 9.011114749434205e-07, + "loss": 0.4863, + "step": 15554 + }, + { + "epoch": 0.6418667987125526, + "grad_norm": 2.322994703338806, + "learning_rate": 9.009276762381388e-07, + "loss": 0.4448, + "step": 15555 + }, + { + "epoch": 0.6419080630519105, + "grad_norm": 3.2492818034269937, + "learning_rate": 9.007438882337873e-07, + "loss": 0.4947, + "step": 15556 + }, + { + "epoch": 0.6419493273912684, + "grad_norm": 2.7289338005988415, + "learning_rate": 9.005601109336491e-07, + "loss": 0.491, + "step": 15557 + }, + { + "epoch": 0.6419905917306264, + "grad_norm": 3.316090420795141, + "learning_rate": 9.003763443410068e-07, + "loss": 0.5304, + "step": 15558 + }, + { + "epoch": 0.6420318560699844, + "grad_norm": 1.7935307474514806, + "learning_rate": 9.001925884591431e-07, + "loss": 0.474, + "step": 15559 + }, + { + "epoch": 0.6420731204093423, + "grad_norm": 2.6693995376045523, + "learning_rate": 9.0000884329134e-07, + "loss": 0.5007, + "step": 15560 + }, + { + "epoch": 0.6421143847487002, + "grad_norm": 5.068390712692365, + "learning_rate": 8.998251088408805e-07, + "loss": 0.5071, + "step": 15561 + }, + { + "epoch": 0.6421556490880581, + "grad_norm": 2.7625611018650167, + "learning_rate": 8.996413851110453e-07, + "loss": 0.52, + "step": 15562 + }, + { + "epoch": 0.642196913427416, + "grad_norm": 2.5251168286082555, + "learning_rate": 8.994576721051174e-07, + "loss": 0.5204, + "step": 15563 + }, + { + "epoch": 0.6422381777667739, + "grad_norm": 3.2425498610831065, + "learning_rate": 8.99273969826378e-07, + "loss": 0.5187, + "step": 15564 + }, + { + "epoch": 0.6422794421061319, + "grad_norm": 2.6244426998243573, + "learning_rate": 8.990902782781078e-07, + "loss": 0.5876, + "step": 15565 + }, + { + "epoch": 0.6423207064454898, + "grad_norm": 2.347655157569625, + "learning_rate": 8.989065974635885e-07, + "loss": 0.4528, + "step": 15566 + }, + { + "epoch": 0.6423619707848477, + "grad_norm": 2.5479743919001483, + "learning_rate": 8.987229273861009e-07, + "loss": 0.5263, + "step": 15567 + }, + { + "epoch": 0.6424032351242056, + "grad_norm": 3.890890762631124, + "learning_rate": 8.985392680489265e-07, + "loss": 0.5072, + "step": 15568 + }, + { + "epoch": 0.6424444994635636, + "grad_norm": 2.9522087125269447, + "learning_rate": 8.983556194553447e-07, + "loss": 0.5437, + "step": 15569 + }, + { + "epoch": 0.6424857638029216, + "grad_norm": 2.8853678832977394, + "learning_rate": 8.981719816086374e-07, + "loss": 0.4809, + 
"step": 15570 + }, + { + "epoch": 0.6425270281422795, + "grad_norm": 3.895392292490087, + "learning_rate": 8.979883545120837e-07, + "loss": 0.5686, + "step": 15571 + }, + { + "epoch": 0.6425682924816374, + "grad_norm": 10.216722370620397, + "learning_rate": 8.978047381689636e-07, + "loss": 0.5163, + "step": 15572 + }, + { + "epoch": 0.6426095568209953, + "grad_norm": 2.249783324487456, + "learning_rate": 8.976211325825579e-07, + "loss": 0.5321, + "step": 15573 + }, + { + "epoch": 0.6426508211603532, + "grad_norm": 3.5743826930536464, + "learning_rate": 8.974375377561452e-07, + "loss": 0.5201, + "step": 15574 + }, + { + "epoch": 0.6426920854997111, + "grad_norm": 2.785052342373868, + "learning_rate": 8.972539536930059e-07, + "loss": 0.5173, + "step": 15575 + }, + { + "epoch": 0.642733349839069, + "grad_norm": 4.770269205742913, + "learning_rate": 8.970703803964186e-07, + "loss": 0.4889, + "step": 15576 + }, + { + "epoch": 0.642774614178427, + "grad_norm": 3.2526407674249955, + "learning_rate": 8.968868178696629e-07, + "loss": 0.5181, + "step": 15577 + }, + { + "epoch": 0.6428158785177849, + "grad_norm": 4.725368210023567, + "learning_rate": 8.967032661160175e-07, + "loss": 0.5076, + "step": 15578 + }, + { + "epoch": 0.6428571428571429, + "grad_norm": 3.0861807414069613, + "learning_rate": 8.965197251387604e-07, + "loss": 0.5065, + "step": 15579 + }, + { + "epoch": 0.6428984071965008, + "grad_norm": 2.6213775342159122, + "learning_rate": 8.963361949411711e-07, + "loss": 0.5238, + "step": 15580 + }, + { + "epoch": 0.6429396715358587, + "grad_norm": 4.522540487234428, + "learning_rate": 8.961526755265273e-07, + "loss": 0.4635, + "step": 15581 + }, + { + "epoch": 0.6429809358752167, + "grad_norm": 11.522401342084454, + "learning_rate": 8.959691668981073e-07, + "loss": 0.5288, + "step": 15582 + }, + { + "epoch": 0.6430222002145746, + "grad_norm": 2.3438737419180127, + "learning_rate": 8.957856690591889e-07, + "loss": 0.5011, + "step": 15583 + }, + { + "epoch": 0.6430634645539325, + "grad_norm": 8.071032741545153, + "learning_rate": 8.956021820130503e-07, + "loss": 0.5089, + "step": 15584 + }, + { + "epoch": 0.6431047288932904, + "grad_norm": 6.988497053345534, + "learning_rate": 8.954187057629682e-07, + "loss": 0.5223, + "step": 15585 + }, + { + "epoch": 0.6431459932326483, + "grad_norm": 4.109472294820531, + "learning_rate": 8.952352403122211e-07, + "loss": 0.532, + "step": 15586 + }, + { + "epoch": 0.6431872575720062, + "grad_norm": 3.1765910262905006, + "learning_rate": 8.95051785664085e-07, + "loss": 0.472, + "step": 15587 + }, + { + "epoch": 0.6432285219113641, + "grad_norm": 3.6798776283446584, + "learning_rate": 8.948683418218376e-07, + "loss": 0.4997, + "step": 15588 + }, + { + "epoch": 0.6432697862507222, + "grad_norm": 2.0086185062277973, + "learning_rate": 8.94684908788755e-07, + "loss": 0.5127, + "step": 15589 + }, + { + "epoch": 0.6433110505900801, + "grad_norm": 2.2234003259686888, + "learning_rate": 8.945014865681147e-07, + "loss": 0.537, + "step": 15590 + }, + { + "epoch": 0.643352314929438, + "grad_norm": 2.5828913156153623, + "learning_rate": 8.94318075163193e-07, + "loss": 0.5738, + "step": 15591 + }, + { + "epoch": 0.6433935792687959, + "grad_norm": 5.425674775120465, + "learning_rate": 8.941346745772653e-07, + "loss": 0.5284, + "step": 15592 + }, + { + "epoch": 0.6434348436081538, + "grad_norm": 2.8685934694650705, + "learning_rate": 8.939512848136074e-07, + "loss": 0.5198, + "step": 15593 + }, + { + "epoch": 0.6434761079475118, + "grad_norm": 12.608491127273743, + 
"learning_rate": 8.937679058754962e-07, + "loss": 0.6223, + "step": 15594 + }, + { + "epoch": 0.6435173722868697, + "grad_norm": 4.698716816131398, + "learning_rate": 8.935845377662063e-07, + "loss": 0.5601, + "step": 15595 + }, + { + "epoch": 0.6435586366262276, + "grad_norm": 7.738391234048355, + "learning_rate": 8.934011804890141e-07, + "loss": 0.5839, + "step": 15596 + }, + { + "epoch": 0.6435999009655855, + "grad_norm": 2.1545035132745007, + "learning_rate": 8.932178340471938e-07, + "loss": 0.4988, + "step": 15597 + }, + { + "epoch": 0.6436411653049434, + "grad_norm": 3.929958861484762, + "learning_rate": 8.930344984440212e-07, + "loss": 0.5115, + "step": 15598 + }, + { + "epoch": 0.6436824296443014, + "grad_norm": 1.7594386859118873, + "learning_rate": 8.928511736827706e-07, + "loss": 0.4608, + "step": 15599 + }, + { + "epoch": 0.6437236939836594, + "grad_norm": 1.9726886843994729, + "learning_rate": 8.926678597667172e-07, + "loss": 0.5089, + "step": 15600 + }, + { + "epoch": 0.6437649583230173, + "grad_norm": 8.061810008999885, + "learning_rate": 8.924845566991349e-07, + "loss": 0.4993, + "step": 15601 + }, + { + "epoch": 0.6438062226623752, + "grad_norm": 6.019726444100167, + "learning_rate": 8.923012644832984e-07, + "loss": 0.518, + "step": 15602 + }, + { + "epoch": 0.6438474870017331, + "grad_norm": 2.100712476063814, + "learning_rate": 8.921179831224813e-07, + "loss": 0.5033, + "step": 15603 + }, + { + "epoch": 0.643888751341091, + "grad_norm": 4.2447478352376296, + "learning_rate": 8.919347126199584e-07, + "loss": 0.5161, + "step": 15604 + }, + { + "epoch": 0.6439300156804489, + "grad_norm": 3.549191980148037, + "learning_rate": 8.917514529790022e-07, + "loss": 0.5263, + "step": 15605 + }, + { + "epoch": 0.6439712800198069, + "grad_norm": 3.125482786081734, + "learning_rate": 8.915682042028866e-07, + "loss": 0.5002, + "step": 15606 + }, + { + "epoch": 0.6440125443591648, + "grad_norm": 2.6598380112703484, + "learning_rate": 8.913849662948849e-07, + "loss": 0.5451, + "step": 15607 + }, + { + "epoch": 0.6440538086985227, + "grad_norm": 2.1951004377434282, + "learning_rate": 8.912017392582702e-07, + "loss": 0.5047, + "step": 15608 + }, + { + "epoch": 0.6440950730378807, + "grad_norm": 2.356551361146002, + "learning_rate": 8.910185230963156e-07, + "loss": 0.4881, + "step": 15609 + }, + { + "epoch": 0.6441363373772386, + "grad_norm": 2.4276825617332873, + "learning_rate": 8.908353178122936e-07, + "loss": 0.5216, + "step": 15610 + }, + { + "epoch": 0.6441776017165965, + "grad_norm": 4.186188942467199, + "learning_rate": 8.906521234094764e-07, + "loss": 0.4485, + "step": 15611 + }, + { + "epoch": 0.6442188660559545, + "grad_norm": 5.275526018169425, + "learning_rate": 8.90468939891137e-07, + "loss": 0.5339, + "step": 15612 + }, + { + "epoch": 0.6442601303953124, + "grad_norm": 2.5214047361953136, + "learning_rate": 8.902857672605467e-07, + "loss": 0.507, + "step": 15613 + }, + { + "epoch": 0.6443013947346703, + "grad_norm": 5.782736334728701, + "learning_rate": 8.901026055209784e-07, + "loss": 0.4744, + "step": 15614 + }, + { + "epoch": 0.6443426590740282, + "grad_norm": 4.371621378800043, + "learning_rate": 8.899194546757028e-07, + "loss": 0.5313, + "step": 15615 + }, + { + "epoch": 0.6443839234133861, + "grad_norm": 2.6805791444765448, + "learning_rate": 8.897363147279924e-07, + "loss": 0.5494, + "step": 15616 + }, + { + "epoch": 0.644425187752744, + "grad_norm": 2.216564926820057, + "learning_rate": 8.895531856811182e-07, + "loss": 0.5272, + "step": 15617 + }, + { + "epoch": 
0.644466452092102, + "grad_norm": 2.5708585707515446, + "learning_rate": 8.893700675383507e-07, + "loss": 0.5543, + "step": 15618 + }, + { + "epoch": 0.64450771643146, + "grad_norm": 2.675957165618161, + "learning_rate": 8.891869603029616e-07, + "loss": 0.5172, + "step": 15619 + }, + { + "epoch": 0.6445489807708179, + "grad_norm": 2.5062098268589073, + "learning_rate": 8.890038639782211e-07, + "loss": 0.5634, + "step": 15620 + }, + { + "epoch": 0.6445902451101758, + "grad_norm": 7.263712093485814, + "learning_rate": 8.888207785674004e-07, + "loss": 0.5629, + "step": 15621 + }, + { + "epoch": 0.6446315094495337, + "grad_norm": 3.152389058104581, + "learning_rate": 8.886377040737694e-07, + "loss": 0.4953, + "step": 15622 + }, + { + "epoch": 0.6446727737888917, + "grad_norm": 3.271528969345283, + "learning_rate": 8.884546405005985e-07, + "loss": 0.5581, + "step": 15623 + }, + { + "epoch": 0.6447140381282496, + "grad_norm": 3.358115637406, + "learning_rate": 8.882715878511573e-07, + "loss": 0.4831, + "step": 15624 + }, + { + "epoch": 0.6447553024676075, + "grad_norm": 2.75069079907509, + "learning_rate": 8.880885461287162e-07, + "loss": 0.484, + "step": 15625 + }, + { + "epoch": 0.6447965668069654, + "grad_norm": 3.1090674186979133, + "learning_rate": 8.87905515336544e-07, + "loss": 0.5226, + "step": 15626 + }, + { + "epoch": 0.6448378311463233, + "grad_norm": 3.501736865481992, + "learning_rate": 8.87722495477911e-07, + "loss": 0.5146, + "step": 15627 + }, + { + "epoch": 0.6448790954856812, + "grad_norm": 14.289913578571829, + "learning_rate": 8.875394865560856e-07, + "loss": 0.5209, + "step": 15628 + }, + { + "epoch": 0.6449203598250391, + "grad_norm": 2.410149897015231, + "learning_rate": 8.873564885743374e-07, + "loss": 0.5066, + "step": 15629 + }, + { + "epoch": 0.6449616241643972, + "grad_norm": 3.7332615233345177, + "learning_rate": 8.871735015359351e-07, + "loss": 0.5411, + "step": 15630 + }, + { + "epoch": 0.6450028885037551, + "grad_norm": 75.74785825387262, + "learning_rate": 8.869905254441469e-07, + "loss": 0.5018, + "step": 15631 + }, + { + "epoch": 0.645044152843113, + "grad_norm": 2.5371238045593545, + "learning_rate": 8.868075603022412e-07, + "loss": 0.5191, + "step": 15632 + }, + { + "epoch": 0.6450854171824709, + "grad_norm": 2.7834138323357447, + "learning_rate": 8.866246061134865e-07, + "loss": 0.4395, + "step": 15633 + }, + { + "epoch": 0.6451266815218288, + "grad_norm": 6.553489640979395, + "learning_rate": 8.864416628811507e-07, + "loss": 0.5301, + "step": 15634 + }, + { + "epoch": 0.6451679458611868, + "grad_norm": 3.2907921255799115, + "learning_rate": 8.862587306085018e-07, + "loss": 0.5639, + "step": 15635 + }, + { + "epoch": 0.6452092102005447, + "grad_norm": 2.4763999362353255, + "learning_rate": 8.860758092988072e-07, + "loss": 0.4969, + "step": 15636 + }, + { + "epoch": 0.6452504745399026, + "grad_norm": 6.536965855372783, + "learning_rate": 8.858928989553347e-07, + "loss": 0.5014, + "step": 15637 + }, + { + "epoch": 0.6452917388792605, + "grad_norm": 2.4177824148521307, + "learning_rate": 8.85709999581351e-07, + "loss": 0.5096, + "step": 15638 + }, + { + "epoch": 0.6453330032186184, + "grad_norm": 3.1187778892678883, + "learning_rate": 8.855271111801236e-07, + "loss": 0.5366, + "step": 15639 + }, + { + "epoch": 0.6453742675579764, + "grad_norm": 1.9332174777273736, + "learning_rate": 8.85344233754919e-07, + "loss": 0.4353, + "step": 15640 + }, + { + "epoch": 0.6454155318973344, + "grad_norm": 3.344877251863566, + "learning_rate": 8.851613673090042e-07, + "loss": 
0.5483, + "step": 15641 + }, + { + "epoch": 0.6454567962366923, + "grad_norm": 2.0027057502418364, + "learning_rate": 8.849785118456453e-07, + "loss": 0.5762, + "step": 15642 + }, + { + "epoch": 0.6454980605760502, + "grad_norm": 3.1026834185219845, + "learning_rate": 8.847956673681092e-07, + "loss": 0.5271, + "step": 15643 + }, + { + "epoch": 0.6455393249154081, + "grad_norm": 3.294133492499633, + "learning_rate": 8.846128338796613e-07, + "loss": 0.5167, + "step": 15644 + }, + { + "epoch": 0.645580589254766, + "grad_norm": 2.8499000293932433, + "learning_rate": 8.844300113835675e-07, + "loss": 0.5217, + "step": 15645 + }, + { + "epoch": 0.6456218535941239, + "grad_norm": 7.298210585834288, + "learning_rate": 8.842471998830935e-07, + "loss": 0.5597, + "step": 15646 + }, + { + "epoch": 0.6456631179334819, + "grad_norm": 3.324802208889738, + "learning_rate": 8.840643993815049e-07, + "loss": 0.5821, + "step": 15647 + }, + { + "epoch": 0.6457043822728398, + "grad_norm": 2.1932218512888775, + "learning_rate": 8.838816098820671e-07, + "loss": 0.5289, + "step": 15648 + }, + { + "epoch": 0.6457456466121977, + "grad_norm": 3.4475188920123117, + "learning_rate": 8.836988313880453e-07, + "loss": 0.5565, + "step": 15649 + }, + { + "epoch": 0.6457869109515557, + "grad_norm": 6.81909084462608, + "learning_rate": 8.835160639027037e-07, + "loss": 0.5579, + "step": 15650 + }, + { + "epoch": 0.6458281752909136, + "grad_norm": 3.569395834422517, + "learning_rate": 8.833333074293076e-07, + "loss": 0.5795, + "step": 15651 + }, + { + "epoch": 0.6458694396302715, + "grad_norm": 3.7468788989937702, + "learning_rate": 8.831505619711209e-07, + "loss": 0.5353, + "step": 15652 + }, + { + "epoch": 0.6459107039696295, + "grad_norm": 2.419410798294058, + "learning_rate": 8.829678275314088e-07, + "loss": 0.5432, + "step": 15653 + }, + { + "epoch": 0.6459519683089874, + "grad_norm": 6.784799176774755, + "learning_rate": 8.827851041134344e-07, + "loss": 0.5297, + "step": 15654 + }, + { + "epoch": 0.6459932326483453, + "grad_norm": 2.635905796210423, + "learning_rate": 8.826023917204626e-07, + "loss": 0.5516, + "step": 15655 + }, + { + "epoch": 0.6460344969877032, + "grad_norm": 5.892527408979048, + "learning_rate": 8.824196903557567e-07, + "loss": 0.4841, + "step": 15656 + }, + { + "epoch": 0.6460757613270611, + "grad_norm": 2.166274179719623, + "learning_rate": 8.822370000225795e-07, + "loss": 0.5661, + "step": 15657 + }, + { + "epoch": 0.646117025666419, + "grad_norm": 3.311473916449201, + "learning_rate": 8.820543207241953e-07, + "loss": 0.5339, + "step": 15658 + }, + { + "epoch": 0.646158290005777, + "grad_norm": 2.968319929770461, + "learning_rate": 8.818716524638663e-07, + "loss": 0.5224, + "step": 15659 + }, + { + "epoch": 0.646199554345135, + "grad_norm": 2.238960452691225, + "learning_rate": 8.816889952448563e-07, + "loss": 0.5463, + "step": 15660 + }, + { + "epoch": 0.6462408186844929, + "grad_norm": 7.1664958155859795, + "learning_rate": 8.815063490704274e-07, + "loss": 0.5073, + "step": 15661 + }, + { + "epoch": 0.6462820830238508, + "grad_norm": 1.7651305990992328, + "learning_rate": 8.813237139438426e-07, + "loss": 0.547, + "step": 15662 + }, + { + "epoch": 0.6463233473632087, + "grad_norm": 2.3161326214284634, + "learning_rate": 8.811410898683638e-07, + "loss": 0.468, + "step": 15663 + }, + { + "epoch": 0.6463646117025666, + "grad_norm": 5.176570964868959, + "learning_rate": 8.809584768472534e-07, + "loss": 0.5371, + "step": 15664 + }, + { + "epoch": 0.6464058760419246, + "grad_norm": 2.5552571090749963, + 
"learning_rate": 8.807758748837728e-07, + "loss": 0.5373, + "step": 15665 + }, + { + "epoch": 0.6464471403812825, + "grad_norm": 2.589169183844355, + "learning_rate": 8.805932839811845e-07, + "loss": 0.5116, + "step": 15666 + }, + { + "epoch": 0.6464884047206404, + "grad_norm": 34.71808126818614, + "learning_rate": 8.804107041427494e-07, + "loss": 0.5096, + "step": 15667 + }, + { + "epoch": 0.6465296690599983, + "grad_norm": 3.057397518898184, + "learning_rate": 8.802281353717296e-07, + "loss": 0.5319, + "step": 15668 + }, + { + "epoch": 0.6465709333993562, + "grad_norm": 3.1316477737688215, + "learning_rate": 8.800455776713852e-07, + "loss": 0.4839, + "step": 15669 + }, + { + "epoch": 0.6466121977387143, + "grad_norm": 3.006661591710227, + "learning_rate": 8.798630310449788e-07, + "loss": 0.5191, + "step": 15670 + }, + { + "epoch": 0.6466534620780722, + "grad_norm": 6.722408878381481, + "learning_rate": 8.796804954957688e-07, + "loss": 0.4983, + "step": 15671 + }, + { + "epoch": 0.6466947264174301, + "grad_norm": 2.7783704301121466, + "learning_rate": 8.794979710270174e-07, + "loss": 0.5139, + "step": 15672 + }, + { + "epoch": 0.646735990756788, + "grad_norm": 2.873880966697677, + "learning_rate": 8.793154576419842e-07, + "loss": 0.5579, + "step": 15673 + }, + { + "epoch": 0.6467772550961459, + "grad_norm": 4.3063499802836285, + "learning_rate": 8.791329553439299e-07, + "loss": 0.4448, + "step": 15674 + }, + { + "epoch": 0.6468185194355038, + "grad_norm": 5.465318400742278, + "learning_rate": 8.78950464136114e-07, + "loss": 0.5542, + "step": 15675 + }, + { + "epoch": 0.6468597837748618, + "grad_norm": 2.325924844431502, + "learning_rate": 8.787679840217966e-07, + "loss": 0.562, + "step": 15676 + }, + { + "epoch": 0.6469010481142197, + "grad_norm": 3.0943869251937866, + "learning_rate": 8.78585515004237e-07, + "loss": 0.5038, + "step": 15677 + }, + { + "epoch": 0.6469423124535776, + "grad_norm": 3.7109313753298845, + "learning_rate": 8.784030570866947e-07, + "loss": 0.5439, + "step": 15678 + }, + { + "epoch": 0.6469835767929355, + "grad_norm": 3.991863757224105, + "learning_rate": 8.782206102724286e-07, + "loss": 0.5186, + "step": 15679 + }, + { + "epoch": 0.6470248411322935, + "grad_norm": 2.8306681580829496, + "learning_rate": 8.780381745646982e-07, + "loss": 0.5468, + "step": 15680 + }, + { + "epoch": 0.6470661054716514, + "grad_norm": 5.972782603451997, + "learning_rate": 8.778557499667617e-07, + "loss": 0.4939, + "step": 15681 + }, + { + "epoch": 0.6471073698110094, + "grad_norm": 1.967941188270819, + "learning_rate": 8.776733364818778e-07, + "loss": 0.4762, + "step": 15682 + }, + { + "epoch": 0.6471486341503673, + "grad_norm": 5.901489323354427, + "learning_rate": 8.774909341133056e-07, + "loss": 0.5301, + "step": 15683 + }, + { + "epoch": 0.6471898984897252, + "grad_norm": 4.65654075270576, + "learning_rate": 8.77308542864302e-07, + "loss": 0.5387, + "step": 15684 + }, + { + "epoch": 0.6472311628290831, + "grad_norm": 4.000594165881598, + "learning_rate": 8.771261627381257e-07, + "loss": 0.5689, + "step": 15685 + }, + { + "epoch": 0.647272427168441, + "grad_norm": 5.406640296824675, + "learning_rate": 8.769437937380341e-07, + "loss": 0.4904, + "step": 15686 + }, + { + "epoch": 0.6473136915077989, + "grad_norm": 4.839528765091638, + "learning_rate": 8.767614358672851e-07, + "loss": 0.5245, + "step": 15687 + }, + { + "epoch": 0.6473549558471569, + "grad_norm": 3.309931225040161, + "learning_rate": 8.765790891291361e-07, + "loss": 0.5239, + "step": 15688 + }, + { + "epoch": 
0.6473962201865148, + "grad_norm": 2.8945567932173173, + "learning_rate": 8.763967535268438e-07, + "loss": 0.5039, + "step": 15689 + }, + { + "epoch": 0.6474374845258727, + "grad_norm": 2.009027841030801, + "learning_rate": 8.762144290636659e-07, + "loss": 0.5096, + "step": 15690 + }, + { + "epoch": 0.6474787488652307, + "grad_norm": 2.225432473233452, + "learning_rate": 8.760321157428583e-07, + "loss": 0.5098, + "step": 15691 + }, + { + "epoch": 0.6475200132045886, + "grad_norm": 2.5191143577068353, + "learning_rate": 8.758498135676785e-07, + "loss": 0.5015, + "step": 15692 + }, + { + "epoch": 0.6475612775439465, + "grad_norm": 2.0675729566407686, + "learning_rate": 8.75667522541382e-07, + "loss": 0.5356, + "step": 15693 + }, + { + "epoch": 0.6476025418833045, + "grad_norm": 1.9886835896053834, + "learning_rate": 8.754852426672254e-07, + "loss": 0.4484, + "step": 15694 + }, + { + "epoch": 0.6476438062226624, + "grad_norm": 1.9441509231967622, + "learning_rate": 8.753029739484653e-07, + "loss": 0.4971, + "step": 15695 + }, + { + "epoch": 0.6476850705620203, + "grad_norm": 2.750552486849548, + "learning_rate": 8.75120716388357e-07, + "loss": 0.496, + "step": 15696 + }, + { + "epoch": 0.6477263349013782, + "grad_norm": 2.3962717541344194, + "learning_rate": 8.74938469990155e-07, + "loss": 0.5075, + "step": 15697 + }, + { + "epoch": 0.6477675992407361, + "grad_norm": 3.4953569345479627, + "learning_rate": 8.747562347571159e-07, + "loss": 0.4896, + "step": 15698 + }, + { + "epoch": 0.647808863580094, + "grad_norm": 5.473210221514945, + "learning_rate": 8.745740106924942e-07, + "loss": 0.551, + "step": 15699 + }, + { + "epoch": 0.647850127919452, + "grad_norm": 2.391746503307748, + "learning_rate": 8.743917977995461e-07, + "loss": 0.5128, + "step": 15700 + }, + { + "epoch": 0.64789139225881, + "grad_norm": 4.512926561468643, + "learning_rate": 8.74209596081525e-07, + "loss": 0.5443, + "step": 15701 + }, + { + "epoch": 0.6479326565981679, + "grad_norm": 2.879287956645754, + "learning_rate": 8.740274055416858e-07, + "loss": 0.5117, + "step": 15702 + }, + { + "epoch": 0.6479739209375258, + "grad_norm": 4.965738541786994, + "learning_rate": 8.738452261832832e-07, + "loss": 0.528, + "step": 15703 + }, + { + "epoch": 0.6480151852768837, + "grad_norm": 2.2098152947150926, + "learning_rate": 8.736630580095718e-07, + "loss": 0.4581, + "step": 15704 + }, + { + "epoch": 0.6480564496162416, + "grad_norm": 7.961357847028765, + "learning_rate": 8.734809010238045e-07, + "loss": 0.4824, + "step": 15705 + }, + { + "epoch": 0.6480977139555996, + "grad_norm": 10.894188145527897, + "learning_rate": 8.732987552292356e-07, + "loss": 0.4809, + "step": 15706 + }, + { + "epoch": 0.6481389782949575, + "grad_norm": 4.047237380166614, + "learning_rate": 8.731166206291188e-07, + "loss": 0.529, + "step": 15707 + }, + { + "epoch": 0.6481802426343154, + "grad_norm": 6.3581017686623404, + "learning_rate": 8.729344972267077e-07, + "loss": 0.4925, + "step": 15708 + }, + { + "epoch": 0.6482215069736733, + "grad_norm": 2.2702202960979116, + "learning_rate": 8.727523850252555e-07, + "loss": 0.5197, + "step": 15709 + }, + { + "epoch": 0.6482627713130312, + "grad_norm": 2.5069739731296194, + "learning_rate": 8.725702840280139e-07, + "loss": 0.525, + "step": 15710 + }, + { + "epoch": 0.6483040356523893, + "grad_norm": 2.409319321769406, + "learning_rate": 8.723881942382367e-07, + "loss": 0.526, + "step": 15711 + }, + { + "epoch": 0.6483452999917472, + "grad_norm": 2.6965373442710305, + "learning_rate": 8.722061156591772e-07, + "loss": 
0.5315, + "step": 15712 + }, + { + "epoch": 0.6483865643311051, + "grad_norm": 2.948155267183643, + "learning_rate": 8.720240482940862e-07, + "loss": 0.5255, + "step": 15713 + }, + { + "epoch": 0.648427828670463, + "grad_norm": 2.29728881873387, + "learning_rate": 8.718419921462167e-07, + "loss": 0.5091, + "step": 15714 + }, + { + "epoch": 0.6484690930098209, + "grad_norm": 5.5712995413242785, + "learning_rate": 8.716599472188206e-07, + "loss": 0.5259, + "step": 15715 + }, + { + "epoch": 0.6485103573491788, + "grad_norm": 4.684528496303827, + "learning_rate": 8.714779135151503e-07, + "loss": 0.5286, + "step": 15716 + }, + { + "epoch": 0.6485516216885368, + "grad_norm": 3.2817475647455523, + "learning_rate": 8.712958910384563e-07, + "loss": 0.4973, + "step": 15717 + }, + { + "epoch": 0.6485928860278947, + "grad_norm": 2.7377520996518983, + "learning_rate": 8.711138797919904e-07, + "loss": 0.4927, + "step": 15718 + }, + { + "epoch": 0.6486341503672526, + "grad_norm": 3.2817682940782675, + "learning_rate": 8.709318797790037e-07, + "loss": 0.5695, + "step": 15719 + }, + { + "epoch": 0.6486754147066105, + "grad_norm": 3.2208481460224903, + "learning_rate": 8.707498910027481e-07, + "loss": 0.5402, + "step": 15720 + }, + { + "epoch": 0.6487166790459685, + "grad_norm": 2.351413537727891, + "learning_rate": 8.70567913466473e-07, + "loss": 0.4789, + "step": 15721 + }, + { + "epoch": 0.6487579433853264, + "grad_norm": 9.266923430009292, + "learning_rate": 8.7038594717343e-07, + "loss": 0.5124, + "step": 15722 + }, + { + "epoch": 0.6487992077246844, + "grad_norm": 2.680621720730506, + "learning_rate": 8.702039921268685e-07, + "loss": 0.4812, + "step": 15723 + }, + { + "epoch": 0.6488404720640423, + "grad_norm": 2.965644561153036, + "learning_rate": 8.700220483300392e-07, + "loss": 0.5468, + "step": 15724 + }, + { + "epoch": 0.6488817364034002, + "grad_norm": 4.1193987904885985, + "learning_rate": 8.698401157861927e-07, + "loss": 0.5242, + "step": 15725 + }, + { + "epoch": 0.6489230007427581, + "grad_norm": 7.420358435996327, + "learning_rate": 8.696581944985775e-07, + "loss": 0.5378, + "step": 15726 + }, + { + "epoch": 0.648964265082116, + "grad_norm": 2.7713012265006887, + "learning_rate": 8.69476284470444e-07, + "loss": 0.5045, + "step": 15727 + }, + { + "epoch": 0.6490055294214739, + "grad_norm": 2.159622043587167, + "learning_rate": 8.692943857050419e-07, + "loss": 0.5003, + "step": 15728 + }, + { + "epoch": 0.6490467937608319, + "grad_norm": 3.352559509900227, + "learning_rate": 8.691124982056191e-07, + "loss": 0.4828, + "step": 15729 + }, + { + "epoch": 0.6490880581001898, + "grad_norm": 2.321147875146162, + "learning_rate": 8.689306219754256e-07, + "loss": 0.4972, + "step": 15730 + }, + { + "epoch": 0.6491293224395478, + "grad_norm": 6.445111262594216, + "learning_rate": 8.687487570177096e-07, + "loss": 0.5588, + "step": 15731 + }, + { + "epoch": 0.6491705867789057, + "grad_norm": 3.524576754760186, + "learning_rate": 8.685669033357207e-07, + "loss": 0.5429, + "step": 15732 + }, + { + "epoch": 0.6492118511182636, + "grad_norm": 9.366900054394398, + "learning_rate": 8.683850609327059e-07, + "loss": 0.5089, + "step": 15733 + }, + { + "epoch": 0.6492531154576215, + "grad_norm": 2.828442723748023, + "learning_rate": 8.682032298119139e-07, + "loss": 0.5476, + "step": 15734 + }, + { + "epoch": 0.6492943797969795, + "grad_norm": 1.827283767069822, + "learning_rate": 8.680214099765935e-07, + "loss": 0.4464, + "step": 15735 + }, + { + "epoch": 0.6493356441363374, + "grad_norm": 2.9068954373723344, + 
"learning_rate": 8.678396014299909e-07, + "loss": 0.5209, + "step": 15736 + }, + { + "epoch": 0.6493769084756953, + "grad_norm": 2.8848716519075963, + "learning_rate": 8.676578041753552e-07, + "loss": 0.5953, + "step": 15737 + }, + { + "epoch": 0.6494181728150532, + "grad_norm": 2.3405215508150135, + "learning_rate": 8.674760182159322e-07, + "loss": 0.4842, + "step": 15738 + }, + { + "epoch": 0.6494594371544111, + "grad_norm": 3.0423807222973407, + "learning_rate": 8.672942435549701e-07, + "loss": 0.522, + "step": 15739 + }, + { + "epoch": 0.649500701493769, + "grad_norm": 3.6195174293385106, + "learning_rate": 8.671124801957153e-07, + "loss": 0.5019, + "step": 15740 + }, + { + "epoch": 0.6495419658331271, + "grad_norm": 2.6806579602120553, + "learning_rate": 8.669307281414156e-07, + "loss": 0.4841, + "step": 15741 + }, + { + "epoch": 0.649583230172485, + "grad_norm": 2.4839438147126462, + "learning_rate": 8.667489873953163e-07, + "loss": 0.5077, + "step": 15742 + }, + { + "epoch": 0.6496244945118429, + "grad_norm": 3.9641424212003487, + "learning_rate": 8.66567257960664e-07, + "loss": 0.5629, + "step": 15743 + }, + { + "epoch": 0.6496657588512008, + "grad_norm": 4.0569970456665105, + "learning_rate": 8.663855398407052e-07, + "loss": 0.4696, + "step": 15744 + }, + { + "epoch": 0.6497070231905587, + "grad_norm": 2.794099718050806, + "learning_rate": 8.662038330386863e-07, + "loss": 0.5396, + "step": 15745 + }, + { + "epoch": 0.6497482875299166, + "grad_norm": 11.827718069734335, + "learning_rate": 8.660221375578521e-07, + "loss": 0.5354, + "step": 15746 + }, + { + "epoch": 0.6497895518692746, + "grad_norm": 9.600314923383475, + "learning_rate": 8.658404534014482e-07, + "loss": 0.5356, + "step": 15747 + }, + { + "epoch": 0.6498308162086325, + "grad_norm": 3.3311022898199973, + "learning_rate": 8.656587805727208e-07, + "loss": 0.5059, + "step": 15748 + }, + { + "epoch": 0.6498720805479904, + "grad_norm": 2.1187842136241546, + "learning_rate": 8.654771190749147e-07, + "loss": 0.5103, + "step": 15749 + }, + { + "epoch": 0.6499133448873483, + "grad_norm": 2.544162964159777, + "learning_rate": 8.652954689112738e-07, + "loss": 0.5374, + "step": 15750 + }, + { + "epoch": 0.6499546092267062, + "grad_norm": 2.835044560024495, + "learning_rate": 8.651138300850437e-07, + "loss": 0.4878, + "step": 15751 + }, + { + "epoch": 0.6499958735660643, + "grad_norm": 2.4130129361903614, + "learning_rate": 8.649322025994687e-07, + "loss": 0.5358, + "step": 15752 + }, + { + "epoch": 0.6500371379054222, + "grad_norm": 2.2927508051097796, + "learning_rate": 8.647505864577941e-07, + "loss": 0.5059, + "step": 15753 + }, + { + "epoch": 0.6500784022447801, + "grad_norm": 4.379730961906828, + "learning_rate": 8.645689816632622e-07, + "loss": 0.5092, + "step": 15754 + }, + { + "epoch": 0.650119666584138, + "grad_norm": 3.6806153872099667, + "learning_rate": 8.643873882191182e-07, + "loss": 0.4695, + "step": 15755 + }, + { + "epoch": 0.6501609309234959, + "grad_norm": 2.5818727069530927, + "learning_rate": 8.642058061286054e-07, + "loss": 0.5275, + "step": 15756 + }, + { + "epoch": 0.6502021952628538, + "grad_norm": 3.1457433803910164, + "learning_rate": 8.640242353949681e-07, + "loss": 0.5085, + "step": 15757 + }, + { + "epoch": 0.6502434596022117, + "grad_norm": 7.614841390992181, + "learning_rate": 8.638426760214484e-07, + "loss": 0.5266, + "step": 15758 + }, + { + "epoch": 0.6502847239415697, + "grad_norm": 2.3726793639106334, + "learning_rate": 8.636611280112898e-07, + "loss": 0.5627, + "step": 15759 + }, + { + 
"epoch": 0.6503259882809276, + "grad_norm": 2.7913983263964313, + "learning_rate": 8.634795913677354e-07, + "loss": 0.5123, + "step": 15760 + }, + { + "epoch": 0.6503672526202855, + "grad_norm": 1.5569046892435983, + "learning_rate": 8.632980660940282e-07, + "loss": 0.4859, + "step": 15761 + }, + { + "epoch": 0.6504085169596435, + "grad_norm": 1.7937509739064634, + "learning_rate": 8.631165521934105e-07, + "loss": 0.572, + "step": 15762 + }, + { + "epoch": 0.6504497812990014, + "grad_norm": 3.38734412241502, + "learning_rate": 8.629350496691237e-07, + "loss": 0.4941, + "step": 15763 + }, + { + "epoch": 0.6504910456383594, + "grad_norm": 2.472765039601219, + "learning_rate": 8.627535585244107e-07, + "loss": 0.5488, + "step": 15764 + }, + { + "epoch": 0.6505323099777173, + "grad_norm": 2.53598962828673, + "learning_rate": 8.625720787625134e-07, + "loss": 0.524, + "step": 15765 + }, + { + "epoch": 0.6505735743170752, + "grad_norm": 2.598432180556597, + "learning_rate": 8.623906103866737e-07, + "loss": 0.4993, + "step": 15766 + }, + { + "epoch": 0.6506148386564331, + "grad_norm": 3.9742068668043125, + "learning_rate": 8.622091534001325e-07, + "loss": 0.5711, + "step": 15767 + }, + { + "epoch": 0.650656102995791, + "grad_norm": 4.565680848192427, + "learning_rate": 8.620277078061312e-07, + "loss": 0.5524, + "step": 15768 + }, + { + "epoch": 0.6506973673351489, + "grad_norm": 2.310136495288918, + "learning_rate": 8.618462736079115e-07, + "loss": 0.5123, + "step": 15769 + }, + { + "epoch": 0.6507386316745069, + "grad_norm": 3.22937671897911, + "learning_rate": 8.616648508087132e-07, + "loss": 0.4824, + "step": 15770 + }, + { + "epoch": 0.6507798960138648, + "grad_norm": 2.347083703557404, + "learning_rate": 8.614834394117776e-07, + "loss": 0.5404, + "step": 15771 + }, + { + "epoch": 0.6508211603532228, + "grad_norm": 3.9071254144549963, + "learning_rate": 8.61302039420345e-07, + "loss": 0.5079, + "step": 15772 + }, + { + "epoch": 0.6508624246925807, + "grad_norm": 2.6035573733842092, + "learning_rate": 8.611206508376563e-07, + "loss": 0.5052, + "step": 15773 + }, + { + "epoch": 0.6509036890319386, + "grad_norm": 10.520240316539756, + "learning_rate": 8.609392736669507e-07, + "loss": 0.5576, + "step": 15774 + }, + { + "epoch": 0.6509449533712965, + "grad_norm": 3.6382568315752346, + "learning_rate": 8.607579079114688e-07, + "loss": 0.5327, + "step": 15775 + }, + { + "epoch": 0.6509862177106545, + "grad_norm": 4.914147235702503, + "learning_rate": 8.605765535744491e-07, + "loss": 0.5281, + "step": 15776 + }, + { + "epoch": 0.6510274820500124, + "grad_norm": 2.9154324258245667, + "learning_rate": 8.603952106591318e-07, + "loss": 0.5185, + "step": 15777 + }, + { + "epoch": 0.6510687463893703, + "grad_norm": 2.643251850603799, + "learning_rate": 8.602138791687567e-07, + "loss": 0.486, + "step": 15778 + }, + { + "epoch": 0.6511100107287282, + "grad_norm": 2.1118804477304267, + "learning_rate": 8.600325591065616e-07, + "loss": 0.5153, + "step": 15779 + }, + { + "epoch": 0.6511512750680861, + "grad_norm": 2.975254494177528, + "learning_rate": 8.598512504757858e-07, + "loss": 0.493, + "step": 15780 + }, + { + "epoch": 0.651192539407444, + "grad_norm": 3.9670381453936763, + "learning_rate": 8.596699532796684e-07, + "loss": 0.5187, + "step": 15781 + }, + { + "epoch": 0.6512338037468021, + "grad_norm": 2.0552302829118103, + "learning_rate": 8.594886675214478e-07, + "loss": 0.4937, + "step": 15782 + }, + { + "epoch": 0.65127506808616, + "grad_norm": 3.5582037170991776, + "learning_rate": 
8.593073932043615e-07, + "loss": 0.4845, + "step": 15783 + }, + { + "epoch": 0.6513163324255179, + "grad_norm": 3.0501126241081704, + "learning_rate": 8.591261303316479e-07, + "loss": 0.5293, + "step": 15784 + }, + { + "epoch": 0.6513575967648758, + "grad_norm": 12.274451571039688, + "learning_rate": 8.589448789065447e-07, + "loss": 0.4768, + "step": 15785 + }, + { + "epoch": 0.6513988611042337, + "grad_norm": 2.2786873658245748, + "learning_rate": 8.587636389322906e-07, + "loss": 0.5314, + "step": 15786 + }, + { + "epoch": 0.6514401254435916, + "grad_norm": 3.059696009306627, + "learning_rate": 8.585824104121212e-07, + "loss": 0.5391, + "step": 15787 + }, + { + "epoch": 0.6514813897829496, + "grad_norm": 2.3808416447165244, + "learning_rate": 8.58401193349275e-07, + "loss": 0.5233, + "step": 15788 + }, + { + "epoch": 0.6515226541223075, + "grad_norm": 2.2546012028838756, + "learning_rate": 8.58219987746988e-07, + "loss": 0.4502, + "step": 15789 + }, + { + "epoch": 0.6515639184616654, + "grad_norm": 3.0645847790557457, + "learning_rate": 8.58038793608498e-07, + "loss": 0.5442, + "step": 15790 + }, + { + "epoch": 0.6516051828010233, + "grad_norm": 2.4003372348400234, + "learning_rate": 8.578576109370407e-07, + "loss": 0.5356, + "step": 15791 + }, + { + "epoch": 0.6516464471403813, + "grad_norm": 2.6481817541279824, + "learning_rate": 8.576764397358528e-07, + "loss": 0.5259, + "step": 15792 + }, + { + "epoch": 0.6516877114797393, + "grad_norm": 3.342409227929848, + "learning_rate": 8.574952800081705e-07, + "loss": 0.5429, + "step": 15793 + }, + { + "epoch": 0.6517289758190972, + "grad_norm": 2.338277236316517, + "learning_rate": 8.573141317572305e-07, + "loss": 0.5021, + "step": 15794 + }, + { + "epoch": 0.6517702401584551, + "grad_norm": 2.725198586007378, + "learning_rate": 8.571329949862671e-07, + "loss": 0.5181, + "step": 15795 + }, + { + "epoch": 0.651811504497813, + "grad_norm": 4.844560558589546, + "learning_rate": 8.569518696985167e-07, + "loss": 0.5179, + "step": 15796 + }, + { + "epoch": 0.6518527688371709, + "grad_norm": 4.125761244919947, + "learning_rate": 8.567707558972146e-07, + "loss": 0.5598, + "step": 15797 + }, + { + "epoch": 0.6518940331765288, + "grad_norm": 2.6689095821297317, + "learning_rate": 8.565896535855964e-07, + "loss": 0.5126, + "step": 15798 + }, + { + "epoch": 0.6519352975158867, + "grad_norm": 2.336346692851651, + "learning_rate": 8.564085627668961e-07, + "loss": 0.4952, + "step": 15799 + }, + { + "epoch": 0.6519765618552447, + "grad_norm": 13.610621695313757, + "learning_rate": 8.562274834443488e-07, + "loss": 0.499, + "step": 15800 + }, + { + "epoch": 0.6520178261946026, + "grad_norm": 2.5813114847031637, + "learning_rate": 8.560464156211898e-07, + "loss": 0.5477, + "step": 15801 + }, + { + "epoch": 0.6520590905339606, + "grad_norm": 1.9593705590641006, + "learning_rate": 8.558653593006521e-07, + "loss": 0.5369, + "step": 15802 + }, + { + "epoch": 0.6521003548733185, + "grad_norm": 3.6105460327224757, + "learning_rate": 8.55684314485971e-07, + "loss": 0.4985, + "step": 15803 + }, + { + "epoch": 0.6521416192126764, + "grad_norm": 3.5515119839365012, + "learning_rate": 8.555032811803793e-07, + "loss": 0.5195, + "step": 15804 + }, + { + "epoch": 0.6521828835520344, + "grad_norm": 2.6326323212163465, + "learning_rate": 8.553222593871113e-07, + "loss": 0.5454, + "step": 15805 + }, + { + "epoch": 0.6522241478913923, + "grad_norm": 6.211590783377466, + "learning_rate": 8.55141249109401e-07, + "loss": 0.456, + "step": 15806 + }, + { + "epoch": 0.6522654122307502, 
+ "grad_norm": 6.1396735168203405, + "learning_rate": 8.549602503504807e-07, + "loss": 0.4937, + "step": 15807 + }, + { + "epoch": 0.6523066765701081, + "grad_norm": 5.493701215293245, + "learning_rate": 8.54779263113584e-07, + "loss": 0.522, + "step": 15808 + }, + { + "epoch": 0.652347940909466, + "grad_norm": 8.052860538756345, + "learning_rate": 8.545982874019436e-07, + "loss": 0.5267, + "step": 15809 + }, + { + "epoch": 0.6523892052488239, + "grad_norm": 16.688631227497087, + "learning_rate": 8.544173232187929e-07, + "loss": 0.5044, + "step": 15810 + }, + { + "epoch": 0.6524304695881818, + "grad_norm": 2.5504743867405586, + "learning_rate": 8.542363705673633e-07, + "loss": 0.4974, + "step": 15811 + }, + { + "epoch": 0.6524717339275398, + "grad_norm": 2.512463731742436, + "learning_rate": 8.540554294508878e-07, + "loss": 0.5217, + "step": 15812 + }, + { + "epoch": 0.6525129982668978, + "grad_norm": 2.97871127997064, + "learning_rate": 8.538744998725979e-07, + "loss": 0.4998, + "step": 15813 + }, + { + "epoch": 0.6525542626062557, + "grad_norm": 2.318082194764373, + "learning_rate": 8.536935818357265e-07, + "loss": 0.5478, + "step": 15814 + }, + { + "epoch": 0.6525955269456136, + "grad_norm": 8.912113438638627, + "learning_rate": 8.535126753435048e-07, + "loss": 0.4905, + "step": 15815 + }, + { + "epoch": 0.6526367912849715, + "grad_norm": 5.987858919859545, + "learning_rate": 8.533317803991631e-07, + "loss": 0.5219, + "step": 15816 + }, + { + "epoch": 0.6526780556243295, + "grad_norm": 2.705227774764192, + "learning_rate": 8.531508970059335e-07, + "loss": 0.4971, + "step": 15817 + }, + { + "epoch": 0.6527193199636874, + "grad_norm": 2.44282386459562, + "learning_rate": 8.529700251670472e-07, + "loss": 0.4928, + "step": 15818 + }, + { + "epoch": 0.6527605843030453, + "grad_norm": 15.786353166935601, + "learning_rate": 8.527891648857354e-07, + "loss": 0.5766, + "step": 15819 + }, + { + "epoch": 0.6528018486424032, + "grad_norm": 8.525225740387832, + "learning_rate": 8.526083161652275e-07, + "loss": 0.4902, + "step": 15820 + }, + { + "epoch": 0.6528431129817611, + "grad_norm": 2.6864261868954413, + "learning_rate": 8.524274790087546e-07, + "loss": 0.5195, + "step": 15821 + }, + { + "epoch": 0.652884377321119, + "grad_norm": 2.50273025314505, + "learning_rate": 8.52246653419547e-07, + "loss": 0.4885, + "step": 15822 + }, + { + "epoch": 0.6529256416604771, + "grad_norm": 4.043153021763876, + "learning_rate": 8.520658394008354e-07, + "loss": 0.5486, + "step": 15823 + }, + { + "epoch": 0.652966905999835, + "grad_norm": 2.8870231553954477, + "learning_rate": 8.518850369558478e-07, + "loss": 0.5661, + "step": 15824 + }, + { + "epoch": 0.6530081703391929, + "grad_norm": 3.1269311533591813, + "learning_rate": 8.51704246087815e-07, + "loss": 0.559, + "step": 15825 + }, + { + "epoch": 0.6530494346785508, + "grad_norm": 1.9682512151526017, + "learning_rate": 8.515234667999661e-07, + "loss": 0.4986, + "step": 15826 + }, + { + "epoch": 0.6530906990179087, + "grad_norm": 2.8314416560267146, + "learning_rate": 8.513426990955315e-07, + "loss": 0.5684, + "step": 15827 + }, + { + "epoch": 0.6531319633572666, + "grad_norm": 2.737977160816963, + "learning_rate": 8.511619429777378e-07, + "loss": 0.5015, + "step": 15828 + }, + { + "epoch": 0.6531732276966246, + "grad_norm": 3.337980208042425, + "learning_rate": 8.50981198449815e-07, + "loss": 0.5645, + "step": 15829 + }, + { + "epoch": 0.6532144920359825, + "grad_norm": 2.6050060135609283, + "learning_rate": 8.508004655149914e-07, + "loss": 0.4991, + "step": 
15830 + }, + { + "epoch": 0.6532557563753404, + "grad_norm": 2.575302650176443, + "learning_rate": 8.506197441764964e-07, + "loss": 0.549, + "step": 15831 + }, + { + "epoch": 0.6532970207146983, + "grad_norm": 4.97839198713497, + "learning_rate": 8.504390344375565e-07, + "loss": 0.5345, + "step": 15832 + }, + { + "epoch": 0.6533382850540563, + "grad_norm": 2.9135433478199655, + "learning_rate": 8.502583363014005e-07, + "loss": 0.4994, + "step": 15833 + }, + { + "epoch": 0.6533795493934142, + "grad_norm": 2.2195320327736896, + "learning_rate": 8.500776497712559e-07, + "loss": 0.5085, + "step": 15834 + }, + { + "epoch": 0.6534208137327722, + "grad_norm": 7.863132437622034, + "learning_rate": 8.49896974850351e-07, + "loss": 0.4659, + "step": 15835 + }, + { + "epoch": 0.6534620780721301, + "grad_norm": 2.274585235988425, + "learning_rate": 8.497163115419119e-07, + "loss": 0.4976, + "step": 15836 + }, + { + "epoch": 0.653503342411488, + "grad_norm": 2.9403618570316787, + "learning_rate": 8.495356598491663e-07, + "loss": 0.5154, + "step": 15837 + }, + { + "epoch": 0.6535446067508459, + "grad_norm": 1.9031444790967125, + "learning_rate": 8.493550197753408e-07, + "loss": 0.5148, + "step": 15838 + }, + { + "epoch": 0.6535858710902038, + "grad_norm": 2.158545244972434, + "learning_rate": 8.49174391323663e-07, + "loss": 0.5258, + "step": 15839 + }, + { + "epoch": 0.6536271354295617, + "grad_norm": 3.2149013302643743, + "learning_rate": 8.489937744973587e-07, + "loss": 0.5291, + "step": 15840 + }, + { + "epoch": 0.6536683997689197, + "grad_norm": 2.5784625352175374, + "learning_rate": 8.488131692996536e-07, + "loss": 0.5381, + "step": 15841 + }, + { + "epoch": 0.6537096641082776, + "grad_norm": 3.4238355388105277, + "learning_rate": 8.486325757337741e-07, + "loss": 0.6101, + "step": 15842 + }, + { + "epoch": 0.6537509284476356, + "grad_norm": 5.301595011720902, + "learning_rate": 8.484519938029466e-07, + "loss": 0.5693, + "step": 15843 + }, + { + "epoch": 0.6537921927869935, + "grad_norm": 3.144205275425672, + "learning_rate": 8.482714235103967e-07, + "loss": 0.5078, + "step": 15844 + }, + { + "epoch": 0.6538334571263514, + "grad_norm": 2.484105366480604, + "learning_rate": 8.480908648593491e-07, + "loss": 0.5754, + "step": 15845 + }, + { + "epoch": 0.6538747214657094, + "grad_norm": 2.248288276377974, + "learning_rate": 8.479103178530295e-07, + "loss": 0.4923, + "step": 15846 + }, + { + "epoch": 0.6539159858050673, + "grad_norm": 2.8144415886458716, + "learning_rate": 8.477297824946635e-07, + "loss": 0.4659, + "step": 15847 + }, + { + "epoch": 0.6539572501444252, + "grad_norm": 4.180784904511334, + "learning_rate": 8.475492587874748e-07, + "loss": 0.5768, + "step": 15848 + }, + { + "epoch": 0.6539985144837831, + "grad_norm": 3.2709576736069224, + "learning_rate": 8.473687467346883e-07, + "loss": 0.486, + "step": 15849 + }, + { + "epoch": 0.654039778823141, + "grad_norm": 2.75080044460656, + "learning_rate": 8.471882463395288e-07, + "loss": 0.5201, + "step": 15850 + }, + { + "epoch": 0.6540810431624989, + "grad_norm": 2.0821189239338205, + "learning_rate": 8.470077576052209e-07, + "loss": 0.5298, + "step": 15851 + }, + { + "epoch": 0.6541223075018568, + "grad_norm": 3.2492131905574504, + "learning_rate": 8.468272805349872e-07, + "loss": 0.5051, + "step": 15852 + }, + { + "epoch": 0.6541635718412149, + "grad_norm": 3.5338435312544623, + "learning_rate": 8.466468151320523e-07, + "loss": 0.5298, + "step": 15853 + }, + { + "epoch": 0.6542048361805728, + "grad_norm": 2.7795700348881187, + "learning_rate": 
8.464663613996405e-07, + "loss": 0.4878, + "step": 15854 + }, + { + "epoch": 0.6542461005199307, + "grad_norm": 4.6761186036616555, + "learning_rate": 8.462859193409737e-07, + "loss": 0.4689, + "step": 15855 + }, + { + "epoch": 0.6542873648592886, + "grad_norm": 2.7247738662207395, + "learning_rate": 8.461054889592762e-07, + "loss": 0.5312, + "step": 15856 + }, + { + "epoch": 0.6543286291986465, + "grad_norm": 2.531444659749189, + "learning_rate": 8.4592507025777e-07, + "loss": 0.5022, + "step": 15857 + }, + { + "epoch": 0.6543698935380045, + "grad_norm": 3.544085962067833, + "learning_rate": 8.457446632396783e-07, + "loss": 0.5087, + "step": 15858 + }, + { + "epoch": 0.6544111578773624, + "grad_norm": 2.032936479086334, + "learning_rate": 8.455642679082235e-07, + "loss": 0.5138, + "step": 15859 + }, + { + "epoch": 0.6544524222167203, + "grad_norm": 2.513649531750172, + "learning_rate": 8.453838842666287e-07, + "loss": 0.555, + "step": 15860 + }, + { + "epoch": 0.6544936865560782, + "grad_norm": 2.6914934670767283, + "learning_rate": 8.452035123181149e-07, + "loss": 0.5474, + "step": 15861 + }, + { + "epoch": 0.6545349508954361, + "grad_norm": 5.335460856961719, + "learning_rate": 8.450231520659041e-07, + "loss": 0.5233, + "step": 15862 + }, + { + "epoch": 0.6545762152347941, + "grad_norm": 2.97286403867795, + "learning_rate": 8.448428035132184e-07, + "loss": 0.4588, + "step": 15863 + }, + { + "epoch": 0.6546174795741521, + "grad_norm": 3.950646883929776, + "learning_rate": 8.4466246666328e-07, + "loss": 0.4734, + "step": 15864 + }, + { + "epoch": 0.65465874391351, + "grad_norm": 3.418032258895144, + "learning_rate": 8.444821415193083e-07, + "loss": 0.5146, + "step": 15865 + }, + { + "epoch": 0.6547000082528679, + "grad_norm": 4.178179473838199, + "learning_rate": 8.443018280845257e-07, + "loss": 0.4573, + "step": 15866 + }, + { + "epoch": 0.6547412725922258, + "grad_norm": 3.016904155630787, + "learning_rate": 8.441215263621534e-07, + "loss": 0.5216, + "step": 15867 + }, + { + "epoch": 0.6547825369315837, + "grad_norm": 2.9402944757453464, + "learning_rate": 8.439412363554113e-07, + "loss": 0.5094, + "step": 15868 + }, + { + "epoch": 0.6548238012709416, + "grad_norm": 2.9920215325137884, + "learning_rate": 8.437609580675194e-07, + "loss": 0.4807, + "step": 15869 + }, + { + "epoch": 0.6548650656102996, + "grad_norm": 2.3460854428681075, + "learning_rate": 8.435806915016985e-07, + "loss": 0.4845, + "step": 15870 + }, + { + "epoch": 0.6549063299496575, + "grad_norm": 16.42238446618759, + "learning_rate": 8.434004366611682e-07, + "loss": 0.5116, + "step": 15871 + }, + { + "epoch": 0.6549475942890154, + "grad_norm": 2.475065791612618, + "learning_rate": 8.432201935491496e-07, + "loss": 0.4953, + "step": 15872 + }, + { + "epoch": 0.6549888586283733, + "grad_norm": 3.1341415563680335, + "learning_rate": 8.430399621688608e-07, + "loss": 0.5401, + "step": 15873 + }, + { + "epoch": 0.6550301229677313, + "grad_norm": 2.3395481296804634, + "learning_rate": 8.428597425235217e-07, + "loss": 0.5357, + "step": 15874 + }, + { + "epoch": 0.6550713873070892, + "grad_norm": 2.6600504322114245, + "learning_rate": 8.426795346163513e-07, + "loss": 0.4465, + "step": 15875 + }, + { + "epoch": 0.6551126516464472, + "grad_norm": 5.464781072481223, + "learning_rate": 8.424993384505696e-07, + "loss": 0.5702, + "step": 15876 + }, + { + "epoch": 0.6551539159858051, + "grad_norm": 6.544430218545735, + "learning_rate": 8.423191540293937e-07, + "loss": 0.5295, + "step": 15877 + }, + { + "epoch": 0.655195180325163, + 
"grad_norm": 2.035857555680759, + "learning_rate": 8.421389813560433e-07, + "loss": 0.4871, + "step": 15878 + }, + { + "epoch": 0.6552364446645209, + "grad_norm": 2.4394813198981495, + "learning_rate": 8.419588204337361e-07, + "loss": 0.5642, + "step": 15879 + }, + { + "epoch": 0.6552777090038788, + "grad_norm": 11.483964202953791, + "learning_rate": 8.417786712656912e-07, + "loss": 0.5319, + "step": 15880 + }, + { + "epoch": 0.6553189733432367, + "grad_norm": 27.537640474910145, + "learning_rate": 8.415985338551259e-07, + "loss": 0.5501, + "step": 15881 + }, + { + "epoch": 0.6553602376825947, + "grad_norm": 2.12311480533428, + "learning_rate": 8.414184082052574e-07, + "loss": 0.5288, + "step": 15882 + }, + { + "epoch": 0.6554015020219526, + "grad_norm": 2.785267636211975, + "learning_rate": 8.412382943193033e-07, + "loss": 0.55, + "step": 15883 + }, + { + "epoch": 0.6554427663613106, + "grad_norm": 3.5749320753468123, + "learning_rate": 8.410581922004822e-07, + "loss": 0.6071, + "step": 15884 + }, + { + "epoch": 0.6554840307006685, + "grad_norm": 2.3692656925087285, + "learning_rate": 8.408781018520094e-07, + "loss": 0.4782, + "step": 15885 + }, + { + "epoch": 0.6555252950400264, + "grad_norm": 2.0818695663915565, + "learning_rate": 8.406980232771027e-07, + "loss": 0.5282, + "step": 15886 + }, + { + "epoch": 0.6555665593793844, + "grad_norm": 2.891736216248419, + "learning_rate": 8.405179564789785e-07, + "loss": 0.5473, + "step": 15887 + }, + { + "epoch": 0.6556078237187423, + "grad_norm": 4.865981338470325, + "learning_rate": 8.403379014608539e-07, + "loss": 0.4831, + "step": 15888 + }, + { + "epoch": 0.6556490880581002, + "grad_norm": 2.2473158517128815, + "learning_rate": 8.401578582259441e-07, + "loss": 0.5781, + "step": 15889 + }, + { + "epoch": 0.6556903523974581, + "grad_norm": 2.8368724156865732, + "learning_rate": 8.399778267774657e-07, + "loss": 0.5544, + "step": 15890 + }, + { + "epoch": 0.655731616736816, + "grad_norm": 3.0426444753831734, + "learning_rate": 8.397978071186342e-07, + "loss": 0.5449, + "step": 15891 + }, + { + "epoch": 0.6557728810761739, + "grad_norm": 3.118458118391739, + "learning_rate": 8.396177992526663e-07, + "loss": 0.5062, + "step": 15892 + }, + { + "epoch": 0.6558141454155318, + "grad_norm": 4.271578031276643, + "learning_rate": 8.394378031827765e-07, + "loss": 0.5084, + "step": 15893 + }, + { + "epoch": 0.6558554097548899, + "grad_norm": 2.348618230393157, + "learning_rate": 8.392578189121792e-07, + "loss": 0.5363, + "step": 15894 + }, + { + "epoch": 0.6558966740942478, + "grad_norm": 2.409972526332265, + "learning_rate": 8.390778464440904e-07, + "loss": 0.4823, + "step": 15895 + }, + { + "epoch": 0.6559379384336057, + "grad_norm": 3.8525421971732694, + "learning_rate": 8.388978857817246e-07, + "loss": 0.5646, + "step": 15896 + }, + { + "epoch": 0.6559792027729636, + "grad_norm": 4.243298213644193, + "learning_rate": 8.387179369282969e-07, + "loss": 0.5385, + "step": 15897 + }, + { + "epoch": 0.6560204671123215, + "grad_norm": 2.89063299106544, + "learning_rate": 8.385379998870206e-07, + "loss": 0.5041, + "step": 15898 + }, + { + "epoch": 0.6560617314516795, + "grad_norm": 4.215226110858562, + "learning_rate": 8.383580746611103e-07, + "loss": 0.4939, + "step": 15899 + }, + { + "epoch": 0.6561029957910374, + "grad_norm": 2.644477596947506, + "learning_rate": 8.381781612537799e-07, + "loss": 0.5379, + "step": 15900 + }, + { + "epoch": 0.6561442601303953, + "grad_norm": 4.081825809604595, + "learning_rate": 8.37998259668244e-07, + "loss": 0.4953, + 
"step": 15901 + }, + { + "epoch": 0.6561855244697532, + "grad_norm": 2.7913565309141326, + "learning_rate": 8.378183699077144e-07, + "loss": 0.46, + "step": 15902 + }, + { + "epoch": 0.6562267888091111, + "grad_norm": 2.345218520642221, + "learning_rate": 8.376384919754055e-07, + "loss": 0.5369, + "step": 15903 + }, + { + "epoch": 0.6562680531484691, + "grad_norm": 7.338086349542697, + "learning_rate": 8.374586258745297e-07, + "loss": 0.5806, + "step": 15904 + }, + { + "epoch": 0.6563093174878271, + "grad_norm": 2.588676512646885, + "learning_rate": 8.372787716083012e-07, + "loss": 0.5176, + "step": 15905 + }, + { + "epoch": 0.656350581827185, + "grad_norm": 2.016147421496325, + "learning_rate": 8.370989291799314e-07, + "loss": 0.5123, + "step": 15906 + }, + { + "epoch": 0.6563918461665429, + "grad_norm": 3.8062188464203284, + "learning_rate": 8.369190985926329e-07, + "loss": 0.4979, + "step": 15907 + }, + { + "epoch": 0.6564331105059008, + "grad_norm": 2.116007831203774, + "learning_rate": 8.367392798496177e-07, + "loss": 0.4883, + "step": 15908 + }, + { + "epoch": 0.6564743748452587, + "grad_norm": 5.640238766633465, + "learning_rate": 8.36559472954099e-07, + "loss": 0.5396, + "step": 15909 + }, + { + "epoch": 0.6565156391846166, + "grad_norm": 2.4141879616488615, + "learning_rate": 8.363796779092871e-07, + "loss": 0.4919, + "step": 15910 + }, + { + "epoch": 0.6565569035239746, + "grad_norm": 2.1219758957836827, + "learning_rate": 8.361998947183944e-07, + "loss": 0.505, + "step": 15911 + }, + { + "epoch": 0.6565981678633325, + "grad_norm": 5.164538070610742, + "learning_rate": 8.360201233846318e-07, + "loss": 0.5299, + "step": 15912 + }, + { + "epoch": 0.6566394322026904, + "grad_norm": 7.743835903165073, + "learning_rate": 8.358403639112118e-07, + "loss": 0.5684, + "step": 15913 + }, + { + "epoch": 0.6566806965420484, + "grad_norm": 4.75913979928654, + "learning_rate": 8.356606163013436e-07, + "loss": 0.5011, + "step": 15914 + }, + { + "epoch": 0.6567219608814063, + "grad_norm": 4.5550169122965585, + "learning_rate": 8.354808805582388e-07, + "loss": 0.6004, + "step": 15915 + }, + { + "epoch": 0.6567632252207642, + "grad_norm": 2.380436070627019, + "learning_rate": 8.353011566851078e-07, + "loss": 0.4386, + "step": 15916 + }, + { + "epoch": 0.6568044895601222, + "grad_norm": 2.3258622086909133, + "learning_rate": 8.351214446851616e-07, + "loss": 0.5173, + "step": 15917 + }, + { + "epoch": 0.6568457538994801, + "grad_norm": 2.5698520937608516, + "learning_rate": 8.349417445616091e-07, + "loss": 0.4806, + "step": 15918 + }, + { + "epoch": 0.656887018238838, + "grad_norm": 2.744040045655673, + "learning_rate": 8.347620563176612e-07, + "loss": 0.4715, + "step": 15919 + }, + { + "epoch": 0.6569282825781959, + "grad_norm": 10.382511455925004, + "learning_rate": 8.345823799565266e-07, + "loss": 0.4829, + "step": 15920 + }, + { + "epoch": 0.6569695469175538, + "grad_norm": 6.203677293916019, + "learning_rate": 8.344027154814154e-07, + "loss": 0.499, + "step": 15921 + }, + { + "epoch": 0.6570108112569117, + "grad_norm": 2.940705585804702, + "learning_rate": 8.342230628955374e-07, + "loss": 0.5203, + "step": 15922 + }, + { + "epoch": 0.6570520755962697, + "grad_norm": 7.714222999938198, + "learning_rate": 8.340434222021002e-07, + "loss": 0.4971, + "step": 15923 + }, + { + "epoch": 0.6570933399356277, + "grad_norm": 5.788603447587822, + "learning_rate": 8.338637934043135e-07, + "loss": 0.487, + "step": 15924 + }, + { + "epoch": 0.6571346042749856, + "grad_norm": 2.471465546948729, + 
"learning_rate": 8.336841765053867e-07, + "loss": 0.4639, + "step": 15925 + }, + { + "epoch": 0.6571758686143435, + "grad_norm": 2.426928232004767, + "learning_rate": 8.335045715085265e-07, + "loss": 0.5308, + "step": 15926 + }, + { + "epoch": 0.6572171329537014, + "grad_norm": 5.2185872314758495, + "learning_rate": 8.333249784169423e-07, + "loss": 0.4451, + "step": 15927 + }, + { + "epoch": 0.6572583972930593, + "grad_norm": 2.8109940585927555, + "learning_rate": 8.331453972338415e-07, + "loss": 0.5177, + "step": 15928 + }, + { + "epoch": 0.6572996616324173, + "grad_norm": 4.693036510928704, + "learning_rate": 8.329658279624327e-07, + "loss": 0.4725, + "step": 15929 + }, + { + "epoch": 0.6573409259717752, + "grad_norm": 3.5515494822385767, + "learning_rate": 8.327862706059223e-07, + "loss": 0.4846, + "step": 15930 + }, + { + "epoch": 0.6573821903111331, + "grad_norm": 5.009127487573651, + "learning_rate": 8.326067251675183e-07, + "loss": 0.5086, + "step": 15931 + }, + { + "epoch": 0.657423454650491, + "grad_norm": 6.712069700447499, + "learning_rate": 8.324271916504283e-07, + "loss": 0.5264, + "step": 15932 + }, + { + "epoch": 0.6574647189898489, + "grad_norm": 3.219480800396996, + "learning_rate": 8.32247670057858e-07, + "loss": 0.5297, + "step": 15933 + }, + { + "epoch": 0.6575059833292068, + "grad_norm": 3.2400433159576956, + "learning_rate": 8.320681603930157e-07, + "loss": 0.5446, + "step": 15934 + }, + { + "epoch": 0.6575472476685649, + "grad_norm": 3.1773945559472425, + "learning_rate": 8.318886626591059e-07, + "loss": 0.4609, + "step": 15935 + }, + { + "epoch": 0.6575885120079228, + "grad_norm": 3.196518712038705, + "learning_rate": 8.317091768593363e-07, + "loss": 0.5483, + "step": 15936 + }, + { + "epoch": 0.6576297763472807, + "grad_norm": 2.0650313188244893, + "learning_rate": 8.315297029969124e-07, + "loss": 0.4705, + "step": 15937 + }, + { + "epoch": 0.6576710406866386, + "grad_norm": 7.244794556862398, + "learning_rate": 8.31350241075041e-07, + "loss": 0.5439, + "step": 15938 + }, + { + "epoch": 0.6577123050259965, + "grad_norm": 2.2904100814854584, + "learning_rate": 8.311707910969263e-07, + "loss": 0.5224, + "step": 15939 + }, + { + "epoch": 0.6577535693653545, + "grad_norm": 8.36711715655341, + "learning_rate": 8.309913530657745e-07, + "loss": 0.4965, + "step": 15940 + }, + { + "epoch": 0.6577948337047124, + "grad_norm": 3.483017219766174, + "learning_rate": 8.308119269847906e-07, + "loss": 0.5129, + "step": 15941 + }, + { + "epoch": 0.6578360980440703, + "grad_norm": 9.303677999200378, + "learning_rate": 8.306325128571805e-07, + "loss": 0.515, + "step": 15942 + }, + { + "epoch": 0.6578773623834282, + "grad_norm": 2.2682862778823076, + "learning_rate": 8.304531106861475e-07, + "loss": 0.5071, + "step": 15943 + }, + { + "epoch": 0.6579186267227861, + "grad_norm": 6.426928297098727, + "learning_rate": 8.302737204748972e-07, + "loss": 0.5449, + "step": 15944 + }, + { + "epoch": 0.6579598910621441, + "grad_norm": 3.5441452470507, + "learning_rate": 8.300943422266337e-07, + "loss": 0.4745, + "step": 15945 + }, + { + "epoch": 0.6580011554015021, + "grad_norm": 3.9916293604162796, + "learning_rate": 8.299149759445615e-07, + "loss": 0.5094, + "step": 15946 + }, + { + "epoch": 0.65804241974086, + "grad_norm": 2.0972004167185547, + "learning_rate": 8.297356216318834e-07, + "loss": 0.4859, + "step": 15947 + }, + { + "epoch": 0.6580836840802179, + "grad_norm": 2.2279155714876486, + "learning_rate": 8.295562792918038e-07, + "loss": 0.4961, + "step": 15948 + }, + { + "epoch": 
0.6581249484195758, + "grad_norm": 5.557500012149392, + "learning_rate": 8.293769489275265e-07, + "loss": 0.5271, + "step": 15949 + }, + { + "epoch": 0.6581662127589337, + "grad_norm": 9.30153956408211, + "learning_rate": 8.29197630542255e-07, + "loss": 0.5374, + "step": 15950 + }, + { + "epoch": 0.6582074770982916, + "grad_norm": 3.9677205549476295, + "learning_rate": 8.29018324139191e-07, + "loss": 0.5084, + "step": 15951 + }, + { + "epoch": 0.6582487414376496, + "grad_norm": 2.823450386542945, + "learning_rate": 8.288390297215387e-07, + "loss": 0.4822, + "step": 15952 + }, + { + "epoch": 0.6582900057770075, + "grad_norm": 8.01845622897737, + "learning_rate": 8.286597472925002e-07, + "loss": 0.5489, + "step": 15953 + }, + { + "epoch": 0.6583312701163654, + "grad_norm": 4.392622788456623, + "learning_rate": 8.284804768552785e-07, + "loss": 0.5442, + "step": 15954 + }, + { + "epoch": 0.6583725344557234, + "grad_norm": 3.018467295854253, + "learning_rate": 8.283012184130748e-07, + "loss": 0.516, + "step": 15955 + }, + { + "epoch": 0.6584137987950813, + "grad_norm": 2.9297750101151308, + "learning_rate": 8.281219719690917e-07, + "loss": 0.5588, + "step": 15956 + }, + { + "epoch": 0.6584550631344392, + "grad_norm": 4.8512554473373, + "learning_rate": 8.27942737526531e-07, + "loss": 0.5411, + "step": 15957 + }, + { + "epoch": 0.6584963274737972, + "grad_norm": 2.25421693100336, + "learning_rate": 8.277635150885947e-07, + "loss": 0.4823, + "step": 15958 + }, + { + "epoch": 0.6585375918131551, + "grad_norm": 3.077724198508947, + "learning_rate": 8.27584304658484e-07, + "loss": 0.4871, + "step": 15959 + }, + { + "epoch": 0.658578856152513, + "grad_norm": 6.557799781016711, + "learning_rate": 8.274051062393988e-07, + "loss": 0.5295, + "step": 15960 + }, + { + "epoch": 0.6586201204918709, + "grad_norm": 2.242865654036094, + "learning_rate": 8.272259198345412e-07, + "loss": 0.5725, + "step": 15961 + }, + { + "epoch": 0.6586613848312288, + "grad_norm": 3.9579043892731622, + "learning_rate": 8.270467454471123e-07, + "loss": 0.5094, + "step": 15962 + }, + { + "epoch": 0.6587026491705867, + "grad_norm": 2.3732941923592024, + "learning_rate": 8.268675830803113e-07, + "loss": 0.4778, + "step": 15963 + }, + { + "epoch": 0.6587439135099447, + "grad_norm": 4.479380422973849, + "learning_rate": 8.266884327373391e-07, + "loss": 0.5038, + "step": 15964 + }, + { + "epoch": 0.6587851778493027, + "grad_norm": 3.053754965975539, + "learning_rate": 8.265092944213961e-07, + "loss": 0.4868, + "step": 15965 + }, + { + "epoch": 0.6588264421886606, + "grad_norm": 3.2483170058394824, + "learning_rate": 8.263301681356825e-07, + "loss": 0.5101, + "step": 15966 + }, + { + "epoch": 0.6588677065280185, + "grad_norm": 2.898450960681037, + "learning_rate": 8.261510538833965e-07, + "loss": 0.4957, + "step": 15967 + }, + { + "epoch": 0.6589089708673764, + "grad_norm": 2.3160028495727287, + "learning_rate": 8.259719516677387e-07, + "loss": 0.4798, + "step": 15968 + }, + { + "epoch": 0.6589502352067343, + "grad_norm": 1.9099815598408274, + "learning_rate": 8.257928614919079e-07, + "loss": 0.5479, + "step": 15969 + }, + { + "epoch": 0.6589914995460923, + "grad_norm": 2.7676389121253693, + "learning_rate": 8.256137833591038e-07, + "loss": 0.518, + "step": 15970 + }, + { + "epoch": 0.6590327638854502, + "grad_norm": 2.2761324959402955, + "learning_rate": 8.254347172725239e-07, + "loss": 0.4988, + "step": 15971 + }, + { + "epoch": 0.6590740282248081, + "grad_norm": 7.767083926171698, + "learning_rate": 8.252556632353681e-07, + "loss": 
0.5185, + "step": 15972 + }, + { + "epoch": 0.659115292564166, + "grad_norm": 2.0252283864506455, + "learning_rate": 8.250766212508335e-07, + "loss": 0.5307, + "step": 15973 + }, + { + "epoch": 0.6591565569035239, + "grad_norm": 2.0154927128196363, + "learning_rate": 8.248975913221188e-07, + "loss": 0.5002, + "step": 15974 + }, + { + "epoch": 0.659197821242882, + "grad_norm": 5.040378924488117, + "learning_rate": 8.247185734524225e-07, + "loss": 0.5396, + "step": 15975 + }, + { + "epoch": 0.6592390855822399, + "grad_norm": 2.282482720253236, + "learning_rate": 8.245395676449413e-07, + "loss": 0.509, + "step": 15976 + }, + { + "epoch": 0.6592803499215978, + "grad_norm": 2.8522039117448483, + "learning_rate": 8.243605739028731e-07, + "loss": 0.5457, + "step": 15977 + }, + { + "epoch": 0.6593216142609557, + "grad_norm": 2.277077596178565, + "learning_rate": 8.241815922294154e-07, + "loss": 0.4882, + "step": 15978 + }, + { + "epoch": 0.6593628786003136, + "grad_norm": 11.170053671456525, + "learning_rate": 8.240026226277655e-07, + "loss": 0.4996, + "step": 15979 + }, + { + "epoch": 0.6594041429396715, + "grad_norm": 3.9327947153842837, + "learning_rate": 8.238236651011195e-07, + "loss": 0.4965, + "step": 15980 + }, + { + "epoch": 0.6594454072790294, + "grad_norm": 2.6575172742584763, + "learning_rate": 8.236447196526742e-07, + "loss": 0.506, + "step": 15981 + }, + { + "epoch": 0.6594866716183874, + "grad_norm": 4.091513161161287, + "learning_rate": 8.234657862856261e-07, + "loss": 0.4875, + "step": 15982 + }, + { + "epoch": 0.6595279359577453, + "grad_norm": 3.4295949972601254, + "learning_rate": 8.232868650031723e-07, + "loss": 0.5094, + "step": 15983 + }, + { + "epoch": 0.6595692002971032, + "grad_norm": 2.3325515207801346, + "learning_rate": 8.231079558085073e-07, + "loss": 0.5143, + "step": 15984 + }, + { + "epoch": 0.6596104646364612, + "grad_norm": 2.7308671018326334, + "learning_rate": 8.22929058704828e-07, + "loss": 0.4826, + "step": 15985 + }, + { + "epoch": 0.6596517289758191, + "grad_norm": 3.4586023035350473, + "learning_rate": 8.22750173695329e-07, + "loss": 0.6039, + "step": 15986 + }, + { + "epoch": 0.6596929933151771, + "grad_norm": 3.905974386269496, + "learning_rate": 8.225713007832066e-07, + "loss": 0.5545, + "step": 15987 + }, + { + "epoch": 0.659734257654535, + "grad_norm": 3.1230543104277735, + "learning_rate": 8.22392439971655e-07, + "loss": 0.5852, + "step": 15988 + }, + { + "epoch": 0.6597755219938929, + "grad_norm": 3.460911626134086, + "learning_rate": 8.222135912638694e-07, + "loss": 0.5421, + "step": 15989 + }, + { + "epoch": 0.6598167863332508, + "grad_norm": 5.28025577661337, + "learning_rate": 8.220347546630447e-07, + "loss": 0.5722, + "step": 15990 + }, + { + "epoch": 0.6598580506726087, + "grad_norm": 3.619430376529173, + "learning_rate": 8.218559301723759e-07, + "loss": 0.4906, + "step": 15991 + }, + { + "epoch": 0.6598993150119666, + "grad_norm": 10.993931827737713, + "learning_rate": 8.216771177950559e-07, + "loss": 0.4735, + "step": 15992 + }, + { + "epoch": 0.6599405793513246, + "grad_norm": 3.102858612442985, + "learning_rate": 8.214983175342797e-07, + "loss": 0.5061, + "step": 15993 + }, + { + "epoch": 0.6599818436906825, + "grad_norm": 3.263099116127798, + "learning_rate": 8.213195293932407e-07, + "loss": 0.4973, + "step": 15994 + }, + { + "epoch": 0.6600231080300404, + "grad_norm": 3.859800041690524, + "learning_rate": 8.211407533751335e-07, + "loss": 0.4923, + "step": 15995 + }, + { + "epoch": 0.6600643723693984, + "grad_norm": 2.6568824230131014, + 
"learning_rate": 8.209619894831502e-07, + "loss": 0.4491, + "step": 15996 + }, + { + "epoch": 0.6601056367087563, + "grad_norm": 2.7438975036428963, + "learning_rate": 8.207832377204841e-07, + "loss": 0.4995, + "step": 15997 + }, + { + "epoch": 0.6601469010481142, + "grad_norm": 3.4006430326014807, + "learning_rate": 8.206044980903293e-07, + "loss": 0.5323, + "step": 15998 + }, + { + "epoch": 0.6601881653874722, + "grad_norm": 3.0080594691400164, + "learning_rate": 8.204257705958772e-07, + "loss": 0.4998, + "step": 15999 + }, + { + "epoch": 0.6602294297268301, + "grad_norm": 2.708802343519437, + "learning_rate": 8.202470552403212e-07, + "loss": 0.5786, + "step": 16000 + }, + { + "epoch": 0.660270694066188, + "grad_norm": 7.1093090899416875, + "learning_rate": 8.200683520268528e-07, + "loss": 0.4825, + "step": 16001 + }, + { + "epoch": 0.6603119584055459, + "grad_norm": 1.9505154105224591, + "learning_rate": 8.198896609586647e-07, + "loss": 0.5386, + "step": 16002 + }, + { + "epoch": 0.6603532227449038, + "grad_norm": 1.971225456843679, + "learning_rate": 8.197109820389492e-07, + "loss": 0.4852, + "step": 16003 + }, + { + "epoch": 0.6603944870842617, + "grad_norm": 4.4707382610227935, + "learning_rate": 8.195323152708969e-07, + "loss": 0.5023, + "step": 16004 + }, + { + "epoch": 0.6604357514236197, + "grad_norm": 3.8475543141617745, + "learning_rate": 8.193536606576994e-07, + "loss": 0.5272, + "step": 16005 + }, + { + "epoch": 0.6604770157629777, + "grad_norm": 5.820020806185642, + "learning_rate": 8.191750182025484e-07, + "loss": 0.5137, + "step": 16006 + }, + { + "epoch": 0.6605182801023356, + "grad_norm": 2.0731738905830515, + "learning_rate": 8.189963879086354e-07, + "loss": 0.5136, + "step": 16007 + }, + { + "epoch": 0.6605595444416935, + "grad_norm": 3.424251344164784, + "learning_rate": 8.188177697791499e-07, + "loss": 0.4993, + "step": 16008 + }, + { + "epoch": 0.6606008087810514, + "grad_norm": 2.985689754148826, + "learning_rate": 8.18639163817283e-07, + "loss": 0.5064, + "step": 16009 + }, + { + "epoch": 0.6606420731204093, + "grad_norm": 2.3760088894161364, + "learning_rate": 8.184605700262251e-07, + "loss": 0.5506, + "step": 16010 + }, + { + "epoch": 0.6606833374597673, + "grad_norm": 7.595256006304843, + "learning_rate": 8.18281988409167e-07, + "loss": 0.548, + "step": 16011 + }, + { + "epoch": 0.6607246017991252, + "grad_norm": 2.664889435449972, + "learning_rate": 8.181034189692982e-07, + "loss": 0.4913, + "step": 16012 + }, + { + "epoch": 0.6607658661384831, + "grad_norm": 1.7476223489866929, + "learning_rate": 8.179248617098074e-07, + "loss": 0.4615, + "step": 16013 + }, + { + "epoch": 0.660807130477841, + "grad_norm": 5.351697130136494, + "learning_rate": 8.177463166338847e-07, + "loss": 0.5204, + "step": 16014 + }, + { + "epoch": 0.6608483948171989, + "grad_norm": 5.617613698889823, + "learning_rate": 8.175677837447198e-07, + "loss": 0.5037, + "step": 16015 + }, + { + "epoch": 0.660889659156557, + "grad_norm": 3.214603386046011, + "learning_rate": 8.173892630455021e-07, + "loss": 0.5236, + "step": 16016 + }, + { + "epoch": 0.6609309234959149, + "grad_norm": 2.994300831832387, + "learning_rate": 8.17210754539419e-07, + "loss": 0.4742, + "step": 16017 + }, + { + "epoch": 0.6609721878352728, + "grad_norm": 3.351055525253746, + "learning_rate": 8.1703225822966e-07, + "loss": 0.5088, + "step": 16018 + }, + { + "epoch": 0.6610134521746307, + "grad_norm": 3.1745473416267074, + "learning_rate": 8.168537741194133e-07, + "loss": 0.5213, + "step": 16019 + }, + { + "epoch": 
0.6610547165139886, + "grad_norm": 6.408923190492147, + "learning_rate": 8.166753022118679e-07, + "loss": 0.5291, + "step": 16020 + }, + { + "epoch": 0.6610959808533465, + "grad_norm": 3.23691139874657, + "learning_rate": 8.164968425102104e-07, + "loss": 0.5058, + "step": 16021 + }, + { + "epoch": 0.6611372451927044, + "grad_norm": 3.4597062325403374, + "learning_rate": 8.16318395017629e-07, + "loss": 0.5705, + "step": 16022 + }, + { + "epoch": 0.6611785095320624, + "grad_norm": 2.7273896897820564, + "learning_rate": 8.161399597373121e-07, + "loss": 0.5143, + "step": 16023 + }, + { + "epoch": 0.6612197738714203, + "grad_norm": 7.427193266220627, + "learning_rate": 8.159615366724461e-07, + "loss": 0.469, + "step": 16024 + }, + { + "epoch": 0.6612610382107782, + "grad_norm": 6.426072879560257, + "learning_rate": 8.157831258262178e-07, + "loss": 0.5179, + "step": 16025 + }, + { + "epoch": 0.6613023025501362, + "grad_norm": 4.14372154988017, + "learning_rate": 8.156047272018146e-07, + "loss": 0.5042, + "step": 16026 + }, + { + "epoch": 0.6613435668894941, + "grad_norm": 2.5405710093106872, + "learning_rate": 8.154263408024231e-07, + "loss": 0.4625, + "step": 16027 + }, + { + "epoch": 0.661384831228852, + "grad_norm": 3.0785801746671972, + "learning_rate": 8.152479666312301e-07, + "loss": 0.4951, + "step": 16028 + }, + { + "epoch": 0.66142609556821, + "grad_norm": 1.9344468666027295, + "learning_rate": 8.150696046914208e-07, + "loss": 0.4612, + "step": 16029 + }, + { + "epoch": 0.6614673599075679, + "grad_norm": 4.2937711635450775, + "learning_rate": 8.14891254986182e-07, + "loss": 0.5459, + "step": 16030 + }, + { + "epoch": 0.6615086242469258, + "grad_norm": 2.296044393505205, + "learning_rate": 8.147129175186991e-07, + "loss": 0.5528, + "step": 16031 + }, + { + "epoch": 0.6615498885862837, + "grad_norm": 2.4134285664312998, + "learning_rate": 8.145345922921583e-07, + "loss": 0.5131, + "step": 16032 + }, + { + "epoch": 0.6615911529256416, + "grad_norm": 3.240443459273021, + "learning_rate": 8.143562793097441e-07, + "loss": 0.5648, + "step": 16033 + }, + { + "epoch": 0.6616324172649996, + "grad_norm": 3.0140950253340315, + "learning_rate": 8.14177978574642e-07, + "loss": 0.5226, + "step": 16034 + }, + { + "epoch": 0.6616736816043575, + "grad_norm": 4.35918222574113, + "learning_rate": 8.139996900900366e-07, + "loss": 0.54, + "step": 16035 + }, + { + "epoch": 0.6617149459437155, + "grad_norm": 1.9789439392092727, + "learning_rate": 8.138214138591136e-07, + "loss": 0.4464, + "step": 16036 + }, + { + "epoch": 0.6617562102830734, + "grad_norm": 3.262170747977095, + "learning_rate": 8.136431498850562e-07, + "loss": 0.5133, + "step": 16037 + }, + { + "epoch": 0.6617974746224313, + "grad_norm": 8.13792795019367, + "learning_rate": 8.134648981710495e-07, + "loss": 0.579, + "step": 16038 + }, + { + "epoch": 0.6618387389617892, + "grad_norm": 2.782966951061756, + "learning_rate": 8.132866587202768e-07, + "loss": 0.4774, + "step": 16039 + }, + { + "epoch": 0.6618800033011472, + "grad_norm": 2.9923404982857984, + "learning_rate": 8.131084315359228e-07, + "loss": 0.5179, + "step": 16040 + }, + { + "epoch": 0.6619212676405051, + "grad_norm": 3.5328924651499722, + "learning_rate": 8.129302166211699e-07, + "loss": 0.5349, + "step": 16041 + }, + { + "epoch": 0.661962531979863, + "grad_norm": 3.443892463856652, + "learning_rate": 8.127520139792021e-07, + "loss": 0.5396, + "step": 16042 + }, + { + "epoch": 0.6620037963192209, + "grad_norm": 3.413104604138116, + "learning_rate": 8.125738236132027e-07, + "loss": 
0.5005, + "step": 16043 + }, + { + "epoch": 0.6620450606585788, + "grad_norm": 4.299818623041667, + "learning_rate": 8.123956455263552e-07, + "loss": 0.4973, + "step": 16044 + }, + { + "epoch": 0.6620863249979367, + "grad_norm": 2.9906347516592295, + "learning_rate": 8.122174797218406e-07, + "loss": 0.5109, + "step": 16045 + }, + { + "epoch": 0.6621275893372948, + "grad_norm": 2.4848871024035892, + "learning_rate": 8.120393262028427e-07, + "loss": 0.4698, + "step": 16046 + }, + { + "epoch": 0.6621688536766527, + "grad_norm": 2.2772079352153924, + "learning_rate": 8.118611849725433e-07, + "loss": 0.4665, + "step": 16047 + }, + { + "epoch": 0.6622101180160106, + "grad_norm": 9.268717677049818, + "learning_rate": 8.116830560341252e-07, + "loss": 0.4631, + "step": 16048 + }, + { + "epoch": 0.6622513823553685, + "grad_norm": 2.5644172007449453, + "learning_rate": 8.11504939390769e-07, + "loss": 0.4796, + "step": 16049 + }, + { + "epoch": 0.6622926466947264, + "grad_norm": 6.049095983834985, + "learning_rate": 8.113268350456569e-07, + "loss": 0.5129, + "step": 16050 + }, + { + "epoch": 0.6623339110340843, + "grad_norm": 2.5265741511710393, + "learning_rate": 8.11148743001971e-07, + "loss": 0.5389, + "step": 16051 + }, + { + "epoch": 0.6623751753734423, + "grad_norm": 2.532194419685648, + "learning_rate": 8.109706632628913e-07, + "loss": 0.5165, + "step": 16052 + }, + { + "epoch": 0.6624164397128002, + "grad_norm": 3.542024631053991, + "learning_rate": 8.107925958315995e-07, + "loss": 0.5206, + "step": 16053 + }, + { + "epoch": 0.6624577040521581, + "grad_norm": 2.461214125930691, + "learning_rate": 8.106145407112758e-07, + "loss": 0.5308, + "step": 16054 + }, + { + "epoch": 0.662498968391516, + "grad_norm": 2.8912107470472233, + "learning_rate": 8.104364979051009e-07, + "loss": 0.5477, + "step": 16055 + }, + { + "epoch": 0.6625402327308739, + "grad_norm": 3.957900371698895, + "learning_rate": 8.102584674162552e-07, + "loss": 0.5526, + "step": 16056 + }, + { + "epoch": 0.662581497070232, + "grad_norm": 5.603519860197542, + "learning_rate": 8.100804492479193e-07, + "loss": 0.4976, + "step": 16057 + }, + { + "epoch": 0.6626227614095899, + "grad_norm": 4.430777300432162, + "learning_rate": 8.099024434032718e-07, + "loss": 0.4944, + "step": 16058 + }, + { + "epoch": 0.6626640257489478, + "grad_norm": 2.286864890473626, + "learning_rate": 8.097244498854933e-07, + "loss": 0.5408, + "step": 16059 + }, + { + "epoch": 0.6627052900883057, + "grad_norm": 2.44477357197753, + "learning_rate": 8.095464686977627e-07, + "loss": 0.5409, + "step": 16060 + }, + { + "epoch": 0.6627465544276636, + "grad_norm": 3.2345788285103514, + "learning_rate": 8.093684998432602e-07, + "loss": 0.4985, + "step": 16061 + }, + { + "epoch": 0.6627878187670215, + "grad_norm": 2.3112800231712627, + "learning_rate": 8.091905433251629e-07, + "loss": 0.5443, + "step": 16062 + }, + { + "epoch": 0.6628290831063794, + "grad_norm": 2.798358588753211, + "learning_rate": 8.090125991466511e-07, + "loss": 0.5468, + "step": 16063 + }, + { + "epoch": 0.6628703474457374, + "grad_norm": 3.837517083511398, + "learning_rate": 8.088346673109035e-07, + "loss": 0.5178, + "step": 16064 + }, + { + "epoch": 0.6629116117850953, + "grad_norm": 1.7535465516381088, + "learning_rate": 8.086567478210974e-07, + "loss": 0.5211, + "step": 16065 + }, + { + "epoch": 0.6629528761244532, + "grad_norm": 3.901109938814779, + "learning_rate": 8.08478840680411e-07, + "loss": 0.5141, + "step": 16066 + }, + { + "epoch": 0.6629941404638112, + "grad_norm": 2.886338858144713, + 
"learning_rate": 8.083009458920224e-07, + "loss": 0.4886, + "step": 16067 + }, + { + "epoch": 0.6630354048031691, + "grad_norm": 7.1461845153816075, + "learning_rate": 8.081230634591092e-07, + "loss": 0.4669, + "step": 16068 + }, + { + "epoch": 0.663076669142527, + "grad_norm": 5.403021915175701, + "learning_rate": 8.079451933848494e-07, + "loss": 0.4707, + "step": 16069 + }, + { + "epoch": 0.663117933481885, + "grad_norm": 2.221144541938809, + "learning_rate": 8.077673356724192e-07, + "loss": 0.5321, + "step": 16070 + }, + { + "epoch": 0.6631591978212429, + "grad_norm": 5.04443334418665, + "learning_rate": 8.075894903249961e-07, + "loss": 0.4473, + "step": 16071 + }, + { + "epoch": 0.6632004621606008, + "grad_norm": 2.8694276062299564, + "learning_rate": 8.074116573457568e-07, + "loss": 0.5251, + "step": 16072 + }, + { + "epoch": 0.6632417264999587, + "grad_norm": 2.4177921672579257, + "learning_rate": 8.072338367378784e-07, + "loss": 0.5197, + "step": 16073 + }, + { + "epoch": 0.6632829908393166, + "grad_norm": 2.0414264088461196, + "learning_rate": 8.070560285045363e-07, + "loss": 0.5123, + "step": 16074 + }, + { + "epoch": 0.6633242551786745, + "grad_norm": 2.856112703140445, + "learning_rate": 8.068782326489068e-07, + "loss": 0.5177, + "step": 16075 + }, + { + "epoch": 0.6633655195180325, + "grad_norm": 3.5051321342895063, + "learning_rate": 8.067004491741662e-07, + "loss": 0.4802, + "step": 16076 + }, + { + "epoch": 0.6634067838573905, + "grad_norm": 3.615014560582651, + "learning_rate": 8.065226780834904e-07, + "loss": 0.5058, + "step": 16077 + }, + { + "epoch": 0.6634480481967484, + "grad_norm": 2.2622447204877707, + "learning_rate": 8.063449193800544e-07, + "loss": 0.4843, + "step": 16078 + }, + { + "epoch": 0.6634893125361063, + "grad_norm": 2.120342353668221, + "learning_rate": 8.06167173067033e-07, + "loss": 0.4976, + "step": 16079 + }, + { + "epoch": 0.6635305768754642, + "grad_norm": 2.7847071125691167, + "learning_rate": 8.059894391476012e-07, + "loss": 0.5354, + "step": 16080 + }, + { + "epoch": 0.6635718412148222, + "grad_norm": 3.057873830790209, + "learning_rate": 8.058117176249352e-07, + "loss": 0.536, + "step": 16081 + }, + { + "epoch": 0.6636131055541801, + "grad_norm": 17.57143177345261, + "learning_rate": 8.056340085022076e-07, + "loss": 0.5268, + "step": 16082 + }, + { + "epoch": 0.663654369893538, + "grad_norm": 11.152350681534196, + "learning_rate": 8.054563117825938e-07, + "loss": 0.5174, + "step": 16083 + }, + { + "epoch": 0.6636956342328959, + "grad_norm": 2.87664085890868, + "learning_rate": 8.052786274692677e-07, + "loss": 0.5209, + "step": 16084 + }, + { + "epoch": 0.6637368985722538, + "grad_norm": 2.270288644079651, + "learning_rate": 8.051009555654039e-07, + "loss": 0.442, + "step": 16085 + }, + { + "epoch": 0.6637781629116117, + "grad_norm": 2.631270191155189, + "learning_rate": 8.049232960741747e-07, + "loss": 0.4672, + "step": 16086 + }, + { + "epoch": 0.6638194272509698, + "grad_norm": 2.2126039334237637, + "learning_rate": 8.047456489987543e-07, + "loss": 0.5157, + "step": 16087 + }, + { + "epoch": 0.6638606915903277, + "grad_norm": 2.4786700859002746, + "learning_rate": 8.045680143423159e-07, + "loss": 0.5036, + "step": 16088 + }, + { + "epoch": 0.6639019559296856, + "grad_norm": 14.731696572086385, + "learning_rate": 8.04390392108033e-07, + "loss": 0.4946, + "step": 16089 + }, + { + "epoch": 0.6639432202690435, + "grad_norm": 2.8977386071854823, + "learning_rate": 8.042127822990779e-07, + "loss": 0.522, + "step": 16090 + }, + { + "epoch": 
0.6639844846084014, + "grad_norm": 2.3794629030204795, + "learning_rate": 8.040351849186227e-07, + "loss": 0.4621, + "step": 16091 + }, + { + "epoch": 0.6640257489477593, + "grad_norm": 5.850660668233103, + "learning_rate": 8.038575999698399e-07, + "loss": 0.4889, + "step": 16092 + }, + { + "epoch": 0.6640670132871173, + "grad_norm": 2.1203205922093598, + "learning_rate": 8.036800274559018e-07, + "loss": 0.5414, + "step": 16093 + }, + { + "epoch": 0.6641082776264752, + "grad_norm": 3.516508763837173, + "learning_rate": 8.035024673799812e-07, + "loss": 0.5187, + "step": 16094 + }, + { + "epoch": 0.6641495419658331, + "grad_norm": 3.0011431245551186, + "learning_rate": 8.033249197452479e-07, + "loss": 0.529, + "step": 16095 + }, + { + "epoch": 0.664190806305191, + "grad_norm": 2.3514767458672123, + "learning_rate": 8.031473845548747e-07, + "loss": 0.4633, + "step": 16096 + }, + { + "epoch": 0.664232070644549, + "grad_norm": 2.9830181195811303, + "learning_rate": 8.029698618120323e-07, + "loss": 0.5437, + "step": 16097 + }, + { + "epoch": 0.664273334983907, + "grad_norm": 3.3572916698377893, + "learning_rate": 8.027923515198925e-07, + "loss": 0.4926, + "step": 16098 + }, + { + "epoch": 0.6643145993232649, + "grad_norm": 17.01865134807353, + "learning_rate": 8.026148536816249e-07, + "loss": 0.5273, + "step": 16099 + }, + { + "epoch": 0.6643558636626228, + "grad_norm": 2.8527749429993325, + "learning_rate": 8.024373683004004e-07, + "loss": 0.519, + "step": 16100 + }, + { + "epoch": 0.6643971280019807, + "grad_norm": 2.089893751513129, + "learning_rate": 8.022598953793903e-07, + "loss": 0.522, + "step": 16101 + }, + { + "epoch": 0.6644383923413386, + "grad_norm": 1.8562503109788282, + "learning_rate": 8.020824349217634e-07, + "loss": 0.507, + "step": 16102 + }, + { + "epoch": 0.6644796566806965, + "grad_norm": 3.5465587930637237, + "learning_rate": 8.019049869306907e-07, + "loss": 0.5401, + "step": 16103 + }, + { + "epoch": 0.6645209210200544, + "grad_norm": 4.021842787655626, + "learning_rate": 8.017275514093406e-07, + "loss": 0.4714, + "step": 16104 + }, + { + "epoch": 0.6645621853594124, + "grad_norm": 2.7499636850690834, + "learning_rate": 8.015501283608833e-07, + "loss": 0.4513, + "step": 16105 + }, + { + "epoch": 0.6646034496987703, + "grad_norm": 2.2223362009158576, + "learning_rate": 8.013727177884886e-07, + "loss": 0.5384, + "step": 16106 + }, + { + "epoch": 0.6646447140381283, + "grad_norm": 6.212797177413809, + "learning_rate": 8.011953196953242e-07, + "loss": 0.5604, + "step": 16107 + }, + { + "epoch": 0.6646859783774862, + "grad_norm": 4.209340277617237, + "learning_rate": 8.010179340845596e-07, + "loss": 0.518, + "step": 16108 + }, + { + "epoch": 0.6647272427168441, + "grad_norm": 2.923145076440239, + "learning_rate": 8.008405609593633e-07, + "loss": 0.5131, + "step": 16109 + }, + { + "epoch": 0.664768507056202, + "grad_norm": 2.689004792761705, + "learning_rate": 8.006632003229041e-07, + "loss": 0.5258, + "step": 16110 + }, + { + "epoch": 0.66480977139556, + "grad_norm": 5.980628886246972, + "learning_rate": 8.004858521783491e-07, + "loss": 0.4746, + "step": 16111 + }, + { + "epoch": 0.6648510357349179, + "grad_norm": 6.350761023178385, + "learning_rate": 8.003085165288666e-07, + "loss": 0.5219, + "step": 16112 + }, + { + "epoch": 0.6648923000742758, + "grad_norm": 2.066786483924396, + "learning_rate": 8.001311933776244e-07, + "loss": 0.4817, + "step": 16113 + }, + { + "epoch": 0.6649335644136337, + "grad_norm": 3.5042749595842824, + "learning_rate": 7.999538827277907e-07, + 
"loss": 0.5253, + "step": 16114 + }, + { + "epoch": 0.6649748287529916, + "grad_norm": 3.3838664671720773, + "learning_rate": 7.997765845825308e-07, + "loss": 0.4912, + "step": 16115 + }, + { + "epoch": 0.6650160930923495, + "grad_norm": 2.5610661805584685, + "learning_rate": 7.995992989450133e-07, + "loss": 0.505, + "step": 16116 + }, + { + "epoch": 0.6650573574317075, + "grad_norm": 3.1230683344737478, + "learning_rate": 7.994220258184049e-07, + "loss": 0.5069, + "step": 16117 + }, + { + "epoch": 0.6650986217710655, + "grad_norm": 2.3804329749210122, + "learning_rate": 7.992447652058715e-07, + "loss": 0.549, + "step": 16118 + }, + { + "epoch": 0.6651398861104234, + "grad_norm": 6.408691104328807, + "learning_rate": 7.990675171105793e-07, + "loss": 0.5174, + "step": 16119 + }, + { + "epoch": 0.6651811504497813, + "grad_norm": 3.3594801436971387, + "learning_rate": 7.988902815356946e-07, + "loss": 0.501, + "step": 16120 + }, + { + "epoch": 0.6652224147891392, + "grad_norm": 2.8915327671886835, + "learning_rate": 7.987130584843833e-07, + "loss": 0.5334, + "step": 16121 + }, + { + "epoch": 0.6652636791284972, + "grad_norm": 2.2317848035523506, + "learning_rate": 7.98535847959812e-07, + "loss": 0.5037, + "step": 16122 + }, + { + "epoch": 0.6653049434678551, + "grad_norm": 2.652990317291974, + "learning_rate": 7.983586499651442e-07, + "loss": 0.5336, + "step": 16123 + }, + { + "epoch": 0.665346207807213, + "grad_norm": 3.2746338503246535, + "learning_rate": 7.981814645035463e-07, + "loss": 0.5136, + "step": 16124 + }, + { + "epoch": 0.6653874721465709, + "grad_norm": 5.836681384211708, + "learning_rate": 7.980042915781833e-07, + "loss": 0.5398, + "step": 16125 + }, + { + "epoch": 0.6654287364859288, + "grad_norm": 2.380604580156824, + "learning_rate": 7.978271311922203e-07, + "loss": 0.5149, + "step": 16126 + }, + { + "epoch": 0.6654700008252867, + "grad_norm": 2.410427094105095, + "learning_rate": 7.976499833488207e-07, + "loss": 0.5359, + "step": 16127 + }, + { + "epoch": 0.6655112651646448, + "grad_norm": 2.5556689601783766, + "learning_rate": 7.974728480511495e-07, + "loss": 0.4697, + "step": 16128 + }, + { + "epoch": 0.6655525295040027, + "grad_norm": 2.9463886128965493, + "learning_rate": 7.972957253023706e-07, + "loss": 0.4991, + "step": 16129 + }, + { + "epoch": 0.6655937938433606, + "grad_norm": 2.613778966063685, + "learning_rate": 7.971186151056484e-07, + "loss": 0.5249, + "step": 16130 + }, + { + "epoch": 0.6656350581827185, + "grad_norm": 3.178671337641138, + "learning_rate": 7.969415174641463e-07, + "loss": 0.4968, + "step": 16131 + }, + { + "epoch": 0.6656763225220764, + "grad_norm": 3.3761232166968953, + "learning_rate": 7.967644323810268e-07, + "loss": 0.4773, + "step": 16132 + }, + { + "epoch": 0.6657175868614343, + "grad_norm": 5.353117181260808, + "learning_rate": 7.965873598594536e-07, + "loss": 0.515, + "step": 16133 + }, + { + "epoch": 0.6657588512007923, + "grad_norm": 2.885341166253287, + "learning_rate": 7.9641029990259e-07, + "loss": 0.5091, + "step": 16134 + }, + { + "epoch": 0.6658001155401502, + "grad_norm": 3.1178006547248303, + "learning_rate": 7.962332525135992e-07, + "loss": 0.4577, + "step": 16135 + }, + { + "epoch": 0.6658413798795081, + "grad_norm": 2.075893878474406, + "learning_rate": 7.960562176956423e-07, + "loss": 0.4927, + "step": 16136 + }, + { + "epoch": 0.665882644218866, + "grad_norm": 2.644579647934951, + "learning_rate": 7.958791954518825e-07, + "loss": 0.5426, + "step": 16137 + }, + { + "epoch": 0.665923908558224, + "grad_norm": 
1.9570715035222765, + "learning_rate": 7.957021857854815e-07, + "loss": 0.4918, + "step": 16138 + }, + { + "epoch": 0.665965172897582, + "grad_norm": 2.6396196000003793, + "learning_rate": 7.955251886996022e-07, + "loss": 0.5129, + "step": 16139 + }, + { + "epoch": 0.6660064372369399, + "grad_norm": 2.8567422854667615, + "learning_rate": 7.953482041974047e-07, + "loss": 0.5283, + "step": 16140 + }, + { + "epoch": 0.6660477015762978, + "grad_norm": 58.43979006835859, + "learning_rate": 7.95171232282051e-07, + "loss": 0.5033, + "step": 16141 + }, + { + "epoch": 0.6660889659156557, + "grad_norm": 4.536898832436149, + "learning_rate": 7.94994272956703e-07, + "loss": 0.5265, + "step": 16142 + }, + { + "epoch": 0.6661302302550136, + "grad_norm": 19.6974852208702, + "learning_rate": 7.948173262245209e-07, + "loss": 0.4898, + "step": 16143 + }, + { + "epoch": 0.6661714945943715, + "grad_norm": 10.133190464977734, + "learning_rate": 7.946403920886645e-07, + "loss": 0.5464, + "step": 16144 + }, + { + "epoch": 0.6662127589337294, + "grad_norm": 2.2024341103760796, + "learning_rate": 7.944634705522958e-07, + "loss": 0.4999, + "step": 16145 + }, + { + "epoch": 0.6662540232730874, + "grad_norm": 2.6647209373712992, + "learning_rate": 7.942865616185742e-07, + "loss": 0.4947, + "step": 16146 + }, + { + "epoch": 0.6662952876124453, + "grad_norm": 51.013471813444504, + "learning_rate": 7.941096652906607e-07, + "loss": 0.5236, + "step": 16147 + }, + { + "epoch": 0.6663365519518033, + "grad_norm": 2.1858318711165787, + "learning_rate": 7.939327815717137e-07, + "loss": 0.5017, + "step": 16148 + }, + { + "epoch": 0.6663778162911612, + "grad_norm": 2.679908023162477, + "learning_rate": 7.937559104648937e-07, + "loss": 0.5652, + "step": 16149 + }, + { + "epoch": 0.6664190806305191, + "grad_norm": 3.6624225049453902, + "learning_rate": 7.935790519733597e-07, + "loss": 0.4975, + "step": 16150 + }, + { + "epoch": 0.666460344969877, + "grad_norm": 2.0616535322033065, + "learning_rate": 7.934022061002717e-07, + "loss": 0.5069, + "step": 16151 + }, + { + "epoch": 0.666501609309235, + "grad_norm": 3.7178305611148654, + "learning_rate": 7.932253728487875e-07, + "loss": 0.5412, + "step": 16152 + }, + { + "epoch": 0.6665428736485929, + "grad_norm": 2.3859997327343363, + "learning_rate": 7.930485522220661e-07, + "loss": 0.508, + "step": 16153 + }, + { + "epoch": 0.6665841379879508, + "grad_norm": 3.0103548500523813, + "learning_rate": 7.928717442232661e-07, + "loss": 0.5032, + "step": 16154 + }, + { + "epoch": 0.6666254023273087, + "grad_norm": 4.035345178770071, + "learning_rate": 7.926949488555463e-07, + "loss": 0.5081, + "step": 16155 + }, + { + "epoch": 0.6666666666666666, + "grad_norm": 2.2796097667671122, + "learning_rate": 7.92518166122064e-07, + "loss": 0.5331, + "step": 16156 + }, + { + "epoch": 0.6667079310060245, + "grad_norm": 4.955090021274319, + "learning_rate": 7.923413960259767e-07, + "loss": 0.5365, + "step": 16157 + }, + { + "epoch": 0.6667491953453826, + "grad_norm": 3.9522733978696447, + "learning_rate": 7.921646385704422e-07, + "loss": 0.5354, + "step": 16158 + }, + { + "epoch": 0.6667904596847405, + "grad_norm": 2.154944522358072, + "learning_rate": 7.919878937586187e-07, + "loss": 0.5397, + "step": 16159 + }, + { + "epoch": 0.6668317240240984, + "grad_norm": 3.6824406325705885, + "learning_rate": 7.91811161593662e-07, + "loss": 0.5367, + "step": 16160 + }, + { + "epoch": 0.6668729883634563, + "grad_norm": 4.9272219372725194, + "learning_rate": 7.916344420787295e-07, + "loss": 0.4947, + "step": 16161 + 
}, + { + "epoch": 0.6669142527028142, + "grad_norm": 3.373025208025309, + "learning_rate": 7.914577352169779e-07, + "loss": 0.5697, + "step": 16162 + }, + { + "epoch": 0.6669555170421722, + "grad_norm": 3.635366317614093, + "learning_rate": 7.912810410115643e-07, + "loss": 0.4632, + "step": 16163 + }, + { + "epoch": 0.6669967813815301, + "grad_norm": 6.780226110641813, + "learning_rate": 7.911043594656436e-07, + "loss": 0.5219, + "step": 16164 + }, + { + "epoch": 0.667038045720888, + "grad_norm": 5.976298463477922, + "learning_rate": 7.909276905823726e-07, + "loss": 0.5939, + "step": 16165 + }, + { + "epoch": 0.6670793100602459, + "grad_norm": 4.778804272042089, + "learning_rate": 7.907510343649067e-07, + "loss": 0.48, + "step": 16166 + }, + { + "epoch": 0.6671205743996038, + "grad_norm": 5.6243895366749035, + "learning_rate": 7.905743908164022e-07, + "loss": 0.5292, + "step": 16167 + }, + { + "epoch": 0.6671618387389618, + "grad_norm": 8.152697024961935, + "learning_rate": 7.903977599400133e-07, + "loss": 0.5005, + "step": 16168 + }, + { + "epoch": 0.6672031030783198, + "grad_norm": 3.147842392344605, + "learning_rate": 7.902211417388962e-07, + "loss": 0.4914, + "step": 16169 + }, + { + "epoch": 0.6672443674176777, + "grad_norm": 2.8066091143701524, + "learning_rate": 7.900445362162042e-07, + "loss": 0.5226, + "step": 16170 + }, + { + "epoch": 0.6672856317570356, + "grad_norm": 7.100936594120272, + "learning_rate": 7.89867943375093e-07, + "loss": 0.4876, + "step": 16171 + }, + { + "epoch": 0.6673268960963935, + "grad_norm": 4.4231032445248655, + "learning_rate": 7.896913632187175e-07, + "loss": 0.502, + "step": 16172 + }, + { + "epoch": 0.6673681604357514, + "grad_norm": 3.7917420612645762, + "learning_rate": 7.895147957502305e-07, + "loss": 0.5196, + "step": 16173 + }, + { + "epoch": 0.6674094247751093, + "grad_norm": 2.8579543708061266, + "learning_rate": 7.893382409727868e-07, + "loss": 0.4643, + "step": 16174 + }, + { + "epoch": 0.6674506891144673, + "grad_norm": 3.776372740245167, + "learning_rate": 7.891616988895395e-07, + "loss": 0.4418, + "step": 16175 + }, + { + "epoch": 0.6674919534538252, + "grad_norm": 2.3426465356529182, + "learning_rate": 7.889851695036434e-07, + "loss": 0.5542, + "step": 16176 + }, + { + "epoch": 0.6675332177931831, + "grad_norm": 4.274027567330499, + "learning_rate": 7.888086528182504e-07, + "loss": 0.4991, + "step": 16177 + }, + { + "epoch": 0.667574482132541, + "grad_norm": 2.6169550889392905, + "learning_rate": 7.886321488365136e-07, + "loss": 0.5499, + "step": 16178 + }, + { + "epoch": 0.667615746471899, + "grad_norm": 2.5562277058193534, + "learning_rate": 7.88455657561587e-07, + "loss": 0.5291, + "step": 16179 + }, + { + "epoch": 0.667657010811257, + "grad_norm": 3.3435530353427767, + "learning_rate": 7.882791789966217e-07, + "loss": 0.5747, + "step": 16180 + }, + { + "epoch": 0.6676982751506149, + "grad_norm": 4.903360364572021, + "learning_rate": 7.881027131447707e-07, + "loss": 0.5194, + "step": 16181 + }, + { + "epoch": 0.6677395394899728, + "grad_norm": 3.418424280313523, + "learning_rate": 7.879262600091868e-07, + "loss": 0.4861, + "step": 16182 + }, + { + "epoch": 0.6677808038293307, + "grad_norm": 3.5547515623608708, + "learning_rate": 7.877498195930207e-07, + "loss": 0.5892, + "step": 16183 + }, + { + "epoch": 0.6678220681686886, + "grad_norm": 3.3894068674562887, + "learning_rate": 7.87573391899425e-07, + "loss": 0.5172, + "step": 16184 + }, + { + "epoch": 0.6678633325080465, + "grad_norm": 5.068046983949999, + "learning_rate": 
7.873969769315503e-07, + "loss": 0.576, + "step": 16185 + }, + { + "epoch": 0.6679045968474044, + "grad_norm": 3.9193162301228686, + "learning_rate": 7.872205746925483e-07, + "loss": 0.541, + "step": 16186 + }, + { + "epoch": 0.6679458611867624, + "grad_norm": 6.708209773718507, + "learning_rate": 7.8704418518557e-07, + "loss": 0.5403, + "step": 16187 + }, + { + "epoch": 0.6679871255261203, + "grad_norm": 5.129535507704327, + "learning_rate": 7.868678084137664e-07, + "loss": 0.5559, + "step": 16188 + }, + { + "epoch": 0.6680283898654783, + "grad_norm": 4.131169088522881, + "learning_rate": 7.866914443802873e-07, + "loss": 0.5266, + "step": 16189 + }, + { + "epoch": 0.6680696542048362, + "grad_norm": 2.8918852856881636, + "learning_rate": 7.865150930882833e-07, + "loss": 0.5328, + "step": 16190 + }, + { + "epoch": 0.6681109185441941, + "grad_norm": 3.7196423741042004, + "learning_rate": 7.863387545409047e-07, + "loss": 0.5639, + "step": 16191 + }, + { + "epoch": 0.668152182883552, + "grad_norm": 2.207150014836928, + "learning_rate": 7.861624287413018e-07, + "loss": 0.5079, + "step": 16192 + }, + { + "epoch": 0.66819344722291, + "grad_norm": 4.027724417687982, + "learning_rate": 7.859861156926233e-07, + "loss": 0.5105, + "step": 16193 + }, + { + "epoch": 0.6682347115622679, + "grad_norm": 2.4583330197716635, + "learning_rate": 7.858098153980188e-07, + "loss": 0.4636, + "step": 16194 + }, + { + "epoch": 0.6682759759016258, + "grad_norm": 2.116948077625093, + "learning_rate": 7.856335278606382e-07, + "loss": 0.5261, + "step": 16195 + }, + { + "epoch": 0.6683172402409837, + "grad_norm": 2.494355974139388, + "learning_rate": 7.854572530836298e-07, + "loss": 0.4709, + "step": 16196 + }, + { + "epoch": 0.6683585045803416, + "grad_norm": 2.168647045626882, + "learning_rate": 7.85280991070142e-07, + "loss": 0.5254, + "step": 16197 + }, + { + "epoch": 0.6683997689196995, + "grad_norm": 3.947708465605191, + "learning_rate": 7.851047418233235e-07, + "loss": 0.5264, + "step": 16198 + }, + { + "epoch": 0.6684410332590576, + "grad_norm": 3.904211361057824, + "learning_rate": 7.849285053463225e-07, + "loss": 0.4725, + "step": 16199 + }, + { + "epoch": 0.6684822975984155, + "grad_norm": 8.21136223967722, + "learning_rate": 7.847522816422881e-07, + "loss": 0.489, + "step": 16200 + }, + { + "epoch": 0.6685235619377734, + "grad_norm": 2.4124854538254503, + "learning_rate": 7.845760707143665e-07, + "loss": 0.5459, + "step": 16201 + }, + { + "epoch": 0.6685648262771313, + "grad_norm": 2.8056113148460473, + "learning_rate": 7.843998725657061e-07, + "loss": 0.56, + "step": 16202 + }, + { + "epoch": 0.6686060906164892, + "grad_norm": 2.6691414312781085, + "learning_rate": 7.842236871994541e-07, + "loss": 0.5213, + "step": 16203 + }, + { + "epoch": 0.6686473549558472, + "grad_norm": 8.330610717827312, + "learning_rate": 7.840475146187583e-07, + "loss": 0.5117, + "step": 16204 + }, + { + "epoch": 0.6686886192952051, + "grad_norm": 2.0137231706672516, + "learning_rate": 7.838713548267642e-07, + "loss": 0.4642, + "step": 16205 + }, + { + "epoch": 0.668729883634563, + "grad_norm": 2.9162846245230285, + "learning_rate": 7.836952078266193e-07, + "loss": 0.5394, + "step": 16206 + }, + { + "epoch": 0.6687711479739209, + "grad_norm": 2.9640178023691135, + "learning_rate": 7.835190736214699e-07, + "loss": 0.4884, + "step": 16207 + }, + { + "epoch": 0.6688124123132788, + "grad_norm": 2.146558563396399, + "learning_rate": 7.833429522144629e-07, + "loss": 0.495, + "step": 16208 + }, + { + "epoch": 0.6688536766526368, + 
"grad_norm": 4.023579201624499, + "learning_rate": 7.831668436087432e-07, + "loss": 0.4595, + "step": 16209 + }, + { + "epoch": 0.6688949409919948, + "grad_norm": 10.146688274423951, + "learning_rate": 7.829907478074566e-07, + "loss": 0.5443, + "step": 16210 + }, + { + "epoch": 0.6689362053313527, + "grad_norm": 4.592864811222147, + "learning_rate": 7.828146648137489e-07, + "loss": 0.4775, + "step": 16211 + }, + { + "epoch": 0.6689774696707106, + "grad_norm": 5.911399962330062, + "learning_rate": 7.826385946307654e-07, + "loss": 0.5155, + "step": 16212 + }, + { + "epoch": 0.6690187340100685, + "grad_norm": 2.333349736972019, + "learning_rate": 7.824625372616516e-07, + "loss": 0.4892, + "step": 16213 + }, + { + "epoch": 0.6690599983494264, + "grad_norm": 5.41320834843911, + "learning_rate": 7.822864927095517e-07, + "loss": 0.5151, + "step": 16214 + }, + { + "epoch": 0.6691012626887843, + "grad_norm": 3.1810892152399575, + "learning_rate": 7.821104609776101e-07, + "loss": 0.5203, + "step": 16215 + }, + { + "epoch": 0.6691425270281423, + "grad_norm": 2.8806879322401984, + "learning_rate": 7.819344420689717e-07, + "loss": 0.5477, + "step": 16216 + }, + { + "epoch": 0.6691837913675002, + "grad_norm": 3.6256512960693095, + "learning_rate": 7.81758435986781e-07, + "loss": 0.477, + "step": 16217 + }, + { + "epoch": 0.6692250557068581, + "grad_norm": 4.2990140737088955, + "learning_rate": 7.81582442734181e-07, + "loss": 0.5229, + "step": 16218 + }, + { + "epoch": 0.6692663200462161, + "grad_norm": 2.5301793146526257, + "learning_rate": 7.814064623143156e-07, + "loss": 0.574, + "step": 16219 + }, + { + "epoch": 0.669307584385574, + "grad_norm": 2.013444622678967, + "learning_rate": 7.81230494730329e-07, + "loss": 0.4969, + "step": 16220 + }, + { + "epoch": 0.6693488487249319, + "grad_norm": 2.7580273294701105, + "learning_rate": 7.810545399853632e-07, + "loss": 0.4715, + "step": 16221 + }, + { + "epoch": 0.6693901130642899, + "grad_norm": 2.7400886291466233, + "learning_rate": 7.808785980825627e-07, + "loss": 0.5943, + "step": 16222 + }, + { + "epoch": 0.6694313774036478, + "grad_norm": 2.8925003332336074, + "learning_rate": 7.807026690250686e-07, + "loss": 0.5359, + "step": 16223 + }, + { + "epoch": 0.6694726417430057, + "grad_norm": 4.240630972228171, + "learning_rate": 7.805267528160243e-07, + "loss": 0.4493, + "step": 16224 + }, + { + "epoch": 0.6695139060823636, + "grad_norm": 2.89171704588104, + "learning_rate": 7.803508494585726e-07, + "loss": 0.5194, + "step": 16225 + }, + { + "epoch": 0.6695551704217215, + "grad_norm": 3.166641504743326, + "learning_rate": 7.801749589558544e-07, + "loss": 0.5346, + "step": 16226 + }, + { + "epoch": 0.6695964347610794, + "grad_norm": 2.2576362807250243, + "learning_rate": 7.799990813110123e-07, + "loss": 0.4759, + "step": 16227 + }, + { + "epoch": 0.6696376991004374, + "grad_norm": 1.836081966989632, + "learning_rate": 7.798232165271878e-07, + "loss": 0.5109, + "step": 16228 + }, + { + "epoch": 0.6696789634397954, + "grad_norm": 2.994556653790051, + "learning_rate": 7.796473646075227e-07, + "loss": 0.4723, + "step": 16229 + }, + { + "epoch": 0.6697202277791533, + "grad_norm": 2.925659777571218, + "learning_rate": 7.794715255551571e-07, + "loss": 0.5147, + "step": 16230 + }, + { + "epoch": 0.6697614921185112, + "grad_norm": 3.195120525011624, + "learning_rate": 7.792956993732327e-07, + "loss": 0.4643, + "step": 16231 + }, + { + "epoch": 0.6698027564578691, + "grad_norm": 9.264539966115702, + "learning_rate": 7.791198860648899e-07, + "loss": 0.5248, + "step": 
16232 + }, + { + "epoch": 0.669844020797227, + "grad_norm": 3.1405608253365696, + "learning_rate": 7.7894408563327e-07, + "loss": 0.5206, + "step": 16233 + }, + { + "epoch": 0.669885285136585, + "grad_norm": 2.6255206859976545, + "learning_rate": 7.787682980815118e-07, + "loss": 0.5396, + "step": 16234 + }, + { + "epoch": 0.6699265494759429, + "grad_norm": 3.0084398383427633, + "learning_rate": 7.785925234127568e-07, + "loss": 0.5314, + "step": 16235 + }, + { + "epoch": 0.6699678138153008, + "grad_norm": 2.8241145550145794, + "learning_rate": 7.784167616301434e-07, + "loss": 0.4976, + "step": 16236 + }, + { + "epoch": 0.6700090781546587, + "grad_norm": 3.651468087934394, + "learning_rate": 7.782410127368123e-07, + "loss": 0.5162, + "step": 16237 + }, + { + "epoch": 0.6700503424940166, + "grad_norm": 2.303433647590947, + "learning_rate": 7.780652767359015e-07, + "loss": 0.5555, + "step": 16238 + }, + { + "epoch": 0.6700916068333745, + "grad_norm": 2.1597145273781035, + "learning_rate": 7.778895536305511e-07, + "loss": 0.4773, + "step": 16239 + }, + { + "epoch": 0.6701328711727326, + "grad_norm": 2.230783161771353, + "learning_rate": 7.777138434238997e-07, + "loss": 0.4849, + "step": 16240 + }, + { + "epoch": 0.6701741355120905, + "grad_norm": 3.6920912962988015, + "learning_rate": 7.775381461190864e-07, + "loss": 0.5254, + "step": 16241 + }, + { + "epoch": 0.6702153998514484, + "grad_norm": 1.9329265095552512, + "learning_rate": 7.773624617192487e-07, + "loss": 0.4714, + "step": 16242 + }, + { + "epoch": 0.6702566641908063, + "grad_norm": 3.1182216012189348, + "learning_rate": 7.771867902275251e-07, + "loss": 0.4919, + "step": 16243 + }, + { + "epoch": 0.6702979285301642, + "grad_norm": 4.826867716941544, + "learning_rate": 7.770111316470536e-07, + "loss": 0.5256, + "step": 16244 + }, + { + "epoch": 0.6703391928695221, + "grad_norm": 12.356288192822849, + "learning_rate": 7.768354859809726e-07, + "loss": 0.4967, + "step": 16245 + }, + { + "epoch": 0.6703804572088801, + "grad_norm": 4.062237152686389, + "learning_rate": 7.766598532324184e-07, + "loss": 0.5537, + "step": 16246 + }, + { + "epoch": 0.670421721548238, + "grad_norm": 6.6379465912308255, + "learning_rate": 7.764842334045285e-07, + "loss": 0.5294, + "step": 16247 + }, + { + "epoch": 0.6704629858875959, + "grad_norm": 3.0422233964967886, + "learning_rate": 7.76308626500441e-07, + "loss": 0.4968, + "step": 16248 + }, + { + "epoch": 0.6705042502269538, + "grad_norm": 6.521978980892961, + "learning_rate": 7.76133032523291e-07, + "loss": 0.5495, + "step": 16249 + }, + { + "epoch": 0.6705455145663118, + "grad_norm": 2.4969790094543356, + "learning_rate": 7.759574514762165e-07, + "loss": 0.5453, + "step": 16250 + }, + { + "epoch": 0.6705867789056698, + "grad_norm": 2.160725867846757, + "learning_rate": 7.757818833623527e-07, + "loss": 0.5138, + "step": 16251 + }, + { + "epoch": 0.6706280432450277, + "grad_norm": 2.339606477381595, + "learning_rate": 7.756063281848362e-07, + "loss": 0.4886, + "step": 16252 + }, + { + "epoch": 0.6706693075843856, + "grad_norm": 4.6849493947724055, + "learning_rate": 7.754307859468027e-07, + "loss": 0.5005, + "step": 16253 + }, + { + "epoch": 0.6707105719237435, + "grad_norm": 5.112169850493298, + "learning_rate": 7.752552566513888e-07, + "loss": 0.5148, + "step": 16254 + }, + { + "epoch": 0.6707518362631014, + "grad_norm": 2.218280026313476, + "learning_rate": 7.750797403017283e-07, + "loss": 0.4658, + "step": 16255 + }, + { + "epoch": 0.6707931006024593, + "grad_norm": 5.840226174582202, + "learning_rate": 
7.74904236900957e-07, + "loss": 0.5111, + "step": 16256 + }, + { + "epoch": 0.6708343649418173, + "grad_norm": 2.3315552507322574, + "learning_rate": 7.747287464522106e-07, + "loss": 0.5025, + "step": 16257 + }, + { + "epoch": 0.6708756292811752, + "grad_norm": 3.108431481975936, + "learning_rate": 7.745532689586228e-07, + "loss": 0.5011, + "step": 16258 + }, + { + "epoch": 0.6709168936205331, + "grad_norm": 2.4181515793132298, + "learning_rate": 7.74377804423328e-07, + "loss": 0.4957, + "step": 16259 + }, + { + "epoch": 0.6709581579598911, + "grad_norm": 5.852375585125804, + "learning_rate": 7.742023528494611e-07, + "loss": 0.5502, + "step": 16260 + }, + { + "epoch": 0.670999422299249, + "grad_norm": 7.965103022436505, + "learning_rate": 7.740269142401562e-07, + "loss": 0.5383, + "step": 16261 + }, + { + "epoch": 0.6710406866386069, + "grad_norm": 2.547649453461064, + "learning_rate": 7.738514885985472e-07, + "loss": 0.4895, + "step": 16262 + }, + { + "epoch": 0.6710819509779649, + "grad_norm": 4.37827534173126, + "learning_rate": 7.736760759277661e-07, + "loss": 0.507, + "step": 16263 + }, + { + "epoch": 0.6711232153173228, + "grad_norm": 2.8395987799916607, + "learning_rate": 7.735006762309473e-07, + "loss": 0.4871, + "step": 16264 + }, + { + "epoch": 0.6711644796566807, + "grad_norm": 3.0336736464724168, + "learning_rate": 7.733252895112241e-07, + "loss": 0.5089, + "step": 16265 + }, + { + "epoch": 0.6712057439960386, + "grad_norm": 3.0394911843404424, + "learning_rate": 7.731499157717295e-07, + "loss": 0.5204, + "step": 16266 + }, + { + "epoch": 0.6712470083353965, + "grad_norm": 2.3055405545024295, + "learning_rate": 7.729745550155953e-07, + "loss": 0.4514, + "step": 16267 + }, + { + "epoch": 0.6712882726747544, + "grad_norm": 4.077519546477873, + "learning_rate": 7.727992072459541e-07, + "loss": 0.547, + "step": 16268 + }, + { + "epoch": 0.6713295370141124, + "grad_norm": 6.9589545648962625, + "learning_rate": 7.726238724659382e-07, + "loss": 0.4826, + "step": 16269 + }, + { + "epoch": 0.6713708013534704, + "grad_norm": 14.082493838541287, + "learning_rate": 7.724485506786802e-07, + "loss": 0.4494, + "step": 16270 + }, + { + "epoch": 0.6714120656928283, + "grad_norm": 2.267420854101894, + "learning_rate": 7.722732418873107e-07, + "loss": 0.5039, + "step": 16271 + }, + { + "epoch": 0.6714533300321862, + "grad_norm": 3.238916672008761, + "learning_rate": 7.720979460949616e-07, + "loss": 0.4901, + "step": 16272 + }, + { + "epoch": 0.6714945943715441, + "grad_norm": 3.8911176316400793, + "learning_rate": 7.719226633047638e-07, + "loss": 0.5352, + "step": 16273 + }, + { + "epoch": 0.671535858710902, + "grad_norm": 2.158319368470975, + "learning_rate": 7.717473935198499e-07, + "loss": 0.4559, + "step": 16274 + }, + { + "epoch": 0.67157712305026, + "grad_norm": 3.437461590594968, + "learning_rate": 7.715721367433484e-07, + "loss": 0.4827, + "step": 16275 + }, + { + "epoch": 0.6716183873896179, + "grad_norm": 3.007947563631374, + "learning_rate": 7.713968929783907e-07, + "loss": 0.4982, + "step": 16276 + }, + { + "epoch": 0.6716596517289758, + "grad_norm": 5.0103696079088085, + "learning_rate": 7.71221662228107e-07, + "loss": 0.5368, + "step": 16277 + }, + { + "epoch": 0.6717009160683337, + "grad_norm": 3.5202744665674577, + "learning_rate": 7.710464444956282e-07, + "loss": 0.4978, + "step": 16278 + }, + { + "epoch": 0.6717421804076916, + "grad_norm": 3.632215891665955, + "learning_rate": 7.708712397840829e-07, + "loss": 0.5391, + "step": 16279 + }, + { + "epoch": 0.6717834447470497, + 
"grad_norm": 3.1750510909019067, + "learning_rate": 7.706960480966014e-07, + "loss": 0.5853, + "step": 16280 + }, + { + "epoch": 0.6718247090864076, + "grad_norm": 3.349315509164768, + "learning_rate": 7.705208694363129e-07, + "loss": 0.5629, + "step": 16281 + }, + { + "epoch": 0.6718659734257655, + "grad_norm": 3.6394185611900296, + "learning_rate": 7.703457038063469e-07, + "loss": 0.5466, + "step": 16282 + }, + { + "epoch": 0.6719072377651234, + "grad_norm": 3.356802120177248, + "learning_rate": 7.701705512098315e-07, + "loss": 0.4245, + "step": 16283 + }, + { + "epoch": 0.6719485021044813, + "grad_norm": 3.367835512482592, + "learning_rate": 7.699954116498958e-07, + "loss": 0.5484, + "step": 16284 + }, + { + "epoch": 0.6719897664438392, + "grad_norm": 6.551144592598588, + "learning_rate": 7.698202851296682e-07, + "loss": 0.4894, + "step": 16285 + }, + { + "epoch": 0.6720310307831971, + "grad_norm": 3.2456991306403165, + "learning_rate": 7.696451716522776e-07, + "loss": 0.4539, + "step": 16286 + }, + { + "epoch": 0.6720722951225551, + "grad_norm": 5.110940603038178, + "learning_rate": 7.694700712208512e-07, + "loss": 0.526, + "step": 16287 + }, + { + "epoch": 0.672113559461913, + "grad_norm": 1.6179535901716684, + "learning_rate": 7.692949838385163e-07, + "loss": 0.4777, + "step": 16288 + }, + { + "epoch": 0.6721548238012709, + "grad_norm": 6.054695837411343, + "learning_rate": 7.691199095084011e-07, + "loss": 0.5711, + "step": 16289 + }, + { + "epoch": 0.6721960881406289, + "grad_norm": 7.376085326659039, + "learning_rate": 7.689448482336325e-07, + "loss": 0.5678, + "step": 16290 + }, + { + "epoch": 0.6722373524799868, + "grad_norm": 2.909205274074208, + "learning_rate": 7.687698000173383e-07, + "loss": 0.558, + "step": 16291 + }, + { + "epoch": 0.6722786168193448, + "grad_norm": 2.6604230088417364, + "learning_rate": 7.685947648626444e-07, + "loss": 0.4691, + "step": 16292 + }, + { + "epoch": 0.6723198811587027, + "grad_norm": 5.455864773153786, + "learning_rate": 7.684197427726774e-07, + "loss": 0.5134, + "step": 16293 + }, + { + "epoch": 0.6723611454980606, + "grad_norm": 2.3805867467479715, + "learning_rate": 7.68244733750564e-07, + "loss": 0.547, + "step": 16294 + }, + { + "epoch": 0.6724024098374185, + "grad_norm": 16.95591547078227, + "learning_rate": 7.680697377994309e-07, + "loss": 0.5139, + "step": 16295 + }, + { + "epoch": 0.6724436741767764, + "grad_norm": 6.9697422859418925, + "learning_rate": 7.678947549224025e-07, + "loss": 0.5286, + "step": 16296 + }, + { + "epoch": 0.6724849385161343, + "grad_norm": 6.239966586198961, + "learning_rate": 7.677197851226055e-07, + "loss": 0.5281, + "step": 16297 + }, + { + "epoch": 0.6725262028554922, + "grad_norm": 3.2553886732129484, + "learning_rate": 7.675448284031653e-07, + "loss": 0.5207, + "step": 16298 + }, + { + "epoch": 0.6725674671948502, + "grad_norm": 3.0411017119056534, + "learning_rate": 7.673698847672066e-07, + "loss": 0.5133, + "step": 16299 + }, + { + "epoch": 0.6726087315342081, + "grad_norm": 7.81932592213605, + "learning_rate": 7.671949542178542e-07, + "loss": 0.5235, + "step": 16300 + }, + { + "epoch": 0.6726499958735661, + "grad_norm": 3.6650506724879754, + "learning_rate": 7.670200367582337e-07, + "loss": 0.5554, + "step": 16301 + }, + { + "epoch": 0.672691260212924, + "grad_norm": 12.076748192691516, + "learning_rate": 7.668451323914686e-07, + "loss": 0.4961, + "step": 16302 + }, + { + "epoch": 0.6727325245522819, + "grad_norm": 2.1278455963233296, + "learning_rate": 7.66670241120684e-07, + "loss": 0.4897, + 
"step": 16303 + }, + { + "epoch": 0.6727737888916399, + "grad_norm": 4.608345519081717, + "learning_rate": 7.664953629490027e-07, + "loss": 0.5545, + "step": 16304 + }, + { + "epoch": 0.6728150532309978, + "grad_norm": 2.329405852406411, + "learning_rate": 7.663204978795493e-07, + "loss": 0.5093, + "step": 16305 + }, + { + "epoch": 0.6728563175703557, + "grad_norm": 2.8179932888018797, + "learning_rate": 7.661456459154473e-07, + "loss": 0.4962, + "step": 16306 + }, + { + "epoch": 0.6728975819097136, + "grad_norm": 2.521918278402001, + "learning_rate": 7.659708070598204e-07, + "loss": 0.5121, + "step": 16307 + }, + { + "epoch": 0.6729388462490715, + "grad_norm": 2.8220689626704876, + "learning_rate": 7.657959813157904e-07, + "loss": 0.5795, + "step": 16308 + }, + { + "epoch": 0.6729801105884294, + "grad_norm": 8.15578583696254, + "learning_rate": 7.65621168686481e-07, + "loss": 0.4412, + "step": 16309 + }, + { + "epoch": 0.6730213749277874, + "grad_norm": 3.2911802974183426, + "learning_rate": 7.654463691750147e-07, + "loss": 0.5492, + "step": 16310 + }, + { + "epoch": 0.6730626392671454, + "grad_norm": 2.3373912462209696, + "learning_rate": 7.652715827845142e-07, + "loss": 0.5068, + "step": 16311 + }, + { + "epoch": 0.6731039036065033, + "grad_norm": 2.4048387367727253, + "learning_rate": 7.650968095181007e-07, + "loss": 0.5088, + "step": 16312 + }, + { + "epoch": 0.6731451679458612, + "grad_norm": 2.699513002621643, + "learning_rate": 7.649220493788968e-07, + "loss": 0.5077, + "step": 16313 + }, + { + "epoch": 0.6731864322852191, + "grad_norm": 2.532170554976369, + "learning_rate": 7.647473023700245e-07, + "loss": 0.6047, + "step": 16314 + }, + { + "epoch": 0.673227696624577, + "grad_norm": 6.765434380837335, + "learning_rate": 7.645725684946045e-07, + "loss": 0.533, + "step": 16315 + }, + { + "epoch": 0.673268960963935, + "grad_norm": 2.576298488282933, + "learning_rate": 7.643978477557579e-07, + "loss": 0.4984, + "step": 16316 + }, + { + "epoch": 0.6733102253032929, + "grad_norm": 1.8766045036235997, + "learning_rate": 7.642231401566058e-07, + "loss": 0.5426, + "step": 16317 + }, + { + "epoch": 0.6733514896426508, + "grad_norm": 7.927762544992702, + "learning_rate": 7.64048445700269e-07, + "loss": 0.5541, + "step": 16318 + }, + { + "epoch": 0.6733927539820087, + "grad_norm": 2.493022993689977, + "learning_rate": 7.638737643898687e-07, + "loss": 0.5691, + "step": 16319 + }, + { + "epoch": 0.6734340183213666, + "grad_norm": 4.520018230351162, + "learning_rate": 7.636990962285239e-07, + "loss": 0.5651, + "step": 16320 + }, + { + "epoch": 0.6734752826607247, + "grad_norm": 8.415083542278797, + "learning_rate": 7.635244412193553e-07, + "loss": 0.5067, + "step": 16321 + }, + { + "epoch": 0.6735165470000826, + "grad_norm": 2.0821732548748018, + "learning_rate": 7.633497993654822e-07, + "loss": 0.5522, + "step": 16322 + }, + { + "epoch": 0.6735578113394405, + "grad_norm": 3.0910425338049885, + "learning_rate": 7.631751706700254e-07, + "loss": 0.5707, + "step": 16323 + }, + { + "epoch": 0.6735990756787984, + "grad_norm": 8.41929284772586, + "learning_rate": 7.630005551361027e-07, + "loss": 0.4876, + "step": 16324 + }, + { + "epoch": 0.6736403400181563, + "grad_norm": 2.5657421311205093, + "learning_rate": 7.628259527668336e-07, + "loss": 0.5186, + "step": 16325 + }, + { + "epoch": 0.6736816043575142, + "grad_norm": 6.251980178477156, + "learning_rate": 7.626513635653373e-07, + "loss": 0.5358, + "step": 16326 + }, + { + "epoch": 0.6737228686968721, + "grad_norm": 2.2910799990898485, + 
"learning_rate": 7.624767875347325e-07, + "loss": 0.5144, + "step": 16327 + }, + { + "epoch": 0.6737641330362301, + "grad_norm": 4.370101144207568, + "learning_rate": 7.623022246781376e-07, + "loss": 0.5991, + "step": 16328 + }, + { + "epoch": 0.673805397375588, + "grad_norm": 3.768653331657614, + "learning_rate": 7.621276749986697e-07, + "loss": 0.4919, + "step": 16329 + }, + { + "epoch": 0.6738466617149459, + "grad_norm": 2.4296404754654604, + "learning_rate": 7.619531384994474e-07, + "loss": 0.4782, + "step": 16330 + }, + { + "epoch": 0.6738879260543039, + "grad_norm": 2.069725640613634, + "learning_rate": 7.617786151835883e-07, + "loss": 0.4824, + "step": 16331 + }, + { + "epoch": 0.6739291903936618, + "grad_norm": 3.0009625562288478, + "learning_rate": 7.616041050542104e-07, + "loss": 0.5269, + "step": 16332 + }, + { + "epoch": 0.6739704547330198, + "grad_norm": 2.338659936134613, + "learning_rate": 7.6142960811443e-07, + "loss": 0.5474, + "step": 16333 + }, + { + "epoch": 0.6740117190723777, + "grad_norm": 6.838853935777898, + "learning_rate": 7.612551243673642e-07, + "loss": 0.4606, + "step": 16334 + }, + { + "epoch": 0.6740529834117356, + "grad_norm": 16.135171539436865, + "learning_rate": 7.610806538161308e-07, + "loss": 0.5124, + "step": 16335 + }, + { + "epoch": 0.6740942477510935, + "grad_norm": 4.6357801419041476, + "learning_rate": 7.609061964638445e-07, + "loss": 0.5756, + "step": 16336 + }, + { + "epoch": 0.6741355120904514, + "grad_norm": 1.9768325760009395, + "learning_rate": 7.607317523136226e-07, + "loss": 0.5142, + "step": 16337 + }, + { + "epoch": 0.6741767764298093, + "grad_norm": 6.791696968680756, + "learning_rate": 7.60557321368581e-07, + "loss": 0.5486, + "step": 16338 + }, + { + "epoch": 0.6742180407691672, + "grad_norm": 5.438263260409163, + "learning_rate": 7.603829036318361e-07, + "loss": 0.5068, + "step": 16339 + }, + { + "epoch": 0.6742593051085252, + "grad_norm": 7.441111738487915, + "learning_rate": 7.602084991065028e-07, + "loss": 0.6023, + "step": 16340 + }, + { + "epoch": 0.6743005694478832, + "grad_norm": 2.718109803910861, + "learning_rate": 7.600341077956957e-07, + "loss": 0.4991, + "step": 16341 + }, + { + "epoch": 0.6743418337872411, + "grad_norm": 4.076900228421446, + "learning_rate": 7.598597297025304e-07, + "loss": 0.5561, + "step": 16342 + }, + { + "epoch": 0.674383098126599, + "grad_norm": 3.6092230000555796, + "learning_rate": 7.59685364830122e-07, + "loss": 0.4939, + "step": 16343 + }, + { + "epoch": 0.6744243624659569, + "grad_norm": 2.0815457438021823, + "learning_rate": 7.595110131815857e-07, + "loss": 0.4933, + "step": 16344 + }, + { + "epoch": 0.6744656268053149, + "grad_norm": 4.785848738653066, + "learning_rate": 7.593366747600342e-07, + "loss": 0.5138, + "step": 16345 + }, + { + "epoch": 0.6745068911446728, + "grad_norm": 6.9155060214894455, + "learning_rate": 7.591623495685828e-07, + "loss": 0.4916, + "step": 16346 + }, + { + "epoch": 0.6745481554840307, + "grad_norm": 1.8269768500894916, + "learning_rate": 7.589880376103452e-07, + "loss": 0.4988, + "step": 16347 + }, + { + "epoch": 0.6745894198233886, + "grad_norm": 3.8568596766110743, + "learning_rate": 7.588137388884353e-07, + "loss": 0.5133, + "step": 16348 + }, + { + "epoch": 0.6746306841627465, + "grad_norm": 3.516259537639921, + "learning_rate": 7.58639453405966e-07, + "loss": 0.4847, + "step": 16349 + }, + { + "epoch": 0.6746719485021044, + "grad_norm": 10.516907155879451, + "learning_rate": 7.584651811660504e-07, + "loss": 0.5557, + "step": 16350 + }, + { + "epoch": 
0.6747132128414625, + "grad_norm": 2.609207363360507, + "learning_rate": 7.582909221718016e-07, + "loss": 0.5561, + "step": 16351 + }, + { + "epoch": 0.6747544771808204, + "grad_norm": 3.2020527329490602, + "learning_rate": 7.58116676426333e-07, + "loss": 0.5679, + "step": 16352 + }, + { + "epoch": 0.6747957415201783, + "grad_norm": 3.2003350596855835, + "learning_rate": 7.579424439327566e-07, + "loss": 0.4928, + "step": 16353 + }, + { + "epoch": 0.6748370058595362, + "grad_norm": 5.7013548887481145, + "learning_rate": 7.57768224694184e-07, + "loss": 0.479, + "step": 16354 + }, + { + "epoch": 0.6748782701988941, + "grad_norm": 3.0955823027920286, + "learning_rate": 7.575940187137274e-07, + "loss": 0.5245, + "step": 16355 + }, + { + "epoch": 0.674919534538252, + "grad_norm": 3.5473445424550945, + "learning_rate": 7.574198259944997e-07, + "loss": 0.62, + "step": 16356 + }, + { + "epoch": 0.67496079887761, + "grad_norm": 3.1692297164521053, + "learning_rate": 7.572456465396107e-07, + "loss": 0.5217, + "step": 16357 + }, + { + "epoch": 0.6750020632169679, + "grad_norm": 3.2805171323796483, + "learning_rate": 7.570714803521729e-07, + "loss": 0.504, + "step": 16358 + }, + { + "epoch": 0.6750433275563258, + "grad_norm": 4.686713731253686, + "learning_rate": 7.568973274352967e-07, + "loss": 0.5673, + "step": 16359 + }, + { + "epoch": 0.6750845918956837, + "grad_norm": 2.8117123744690025, + "learning_rate": 7.567231877920937e-07, + "loss": 0.4876, + "step": 16360 + }, + { + "epoch": 0.6751258562350417, + "grad_norm": 2.538299189675875, + "learning_rate": 7.565490614256737e-07, + "loss": 0.4254, + "step": 16361 + }, + { + "epoch": 0.6751671205743996, + "grad_norm": 3.102625420985637, + "learning_rate": 7.563749483391471e-07, + "loss": 0.4696, + "step": 16362 + }, + { + "epoch": 0.6752083849137576, + "grad_norm": 2.411964470903445, + "learning_rate": 7.562008485356241e-07, + "loss": 0.4415, + "step": 16363 + }, + { + "epoch": 0.6752496492531155, + "grad_norm": 3.1093136612147902, + "learning_rate": 7.560267620182155e-07, + "loss": 0.5068, + "step": 16364 + }, + { + "epoch": 0.6752909135924734, + "grad_norm": 2.262057849412139, + "learning_rate": 7.558526887900294e-07, + "loss": 0.465, + "step": 16365 + }, + { + "epoch": 0.6753321779318313, + "grad_norm": 3.034649065462409, + "learning_rate": 7.556786288541762e-07, + "loss": 0.4835, + "step": 16366 + }, + { + "epoch": 0.6753734422711892, + "grad_norm": 2.553486939072573, + "learning_rate": 7.555045822137641e-07, + "loss": 0.5531, + "step": 16367 + }, + { + "epoch": 0.6754147066105471, + "grad_norm": 2.5963863986916196, + "learning_rate": 7.553305488719028e-07, + "loss": 0.5155, + "step": 16368 + }, + { + "epoch": 0.6754559709499051, + "grad_norm": 7.4239420533406255, + "learning_rate": 7.551565288317014e-07, + "loss": 0.5004, + "step": 16369 + }, + { + "epoch": 0.675497235289263, + "grad_norm": 4.531544670965816, + "learning_rate": 7.549825220962672e-07, + "loss": 0.5632, + "step": 16370 + }, + { + "epoch": 0.6755384996286209, + "grad_norm": 1.9102564675353189, + "learning_rate": 7.548085286687088e-07, + "loss": 0.4881, + "step": 16371 + }, + { + "epoch": 0.6755797639679789, + "grad_norm": 23.819687832782556, + "learning_rate": 7.546345485521342e-07, + "loss": 0.517, + "step": 16372 + }, + { + "epoch": 0.6756210283073368, + "grad_norm": 2.43689269560161, + "learning_rate": 7.544605817496517e-07, + "loss": 0.4698, + "step": 16373 + }, + { + "epoch": 0.6756622926466948, + "grad_norm": 2.285570959090447, + "learning_rate": 7.542866282643678e-07, + 
"loss": 0.5023, + "step": 16374 + }, + { + "epoch": 0.6757035569860527, + "grad_norm": 7.960264818031886, + "learning_rate": 7.541126880993901e-07, + "loss": 0.491, + "step": 16375 + }, + { + "epoch": 0.6757448213254106, + "grad_norm": 1.672834333810875, + "learning_rate": 7.539387612578263e-07, + "loss": 0.4727, + "step": 16376 + }, + { + "epoch": 0.6757860856647685, + "grad_norm": 3.318900225147528, + "learning_rate": 7.537648477427822e-07, + "loss": 0.561, + "step": 16377 + }, + { + "epoch": 0.6758273500041264, + "grad_norm": 4.955201675127155, + "learning_rate": 7.535909475573645e-07, + "loss": 0.5183, + "step": 16378 + }, + { + "epoch": 0.6758686143434843, + "grad_norm": 2.8496798784701847, + "learning_rate": 7.534170607046805e-07, + "loss": 0.5001, + "step": 16379 + }, + { + "epoch": 0.6759098786828422, + "grad_norm": 5.600852909232464, + "learning_rate": 7.532431871878345e-07, + "loss": 0.5253, + "step": 16380 + }, + { + "epoch": 0.6759511430222002, + "grad_norm": 2.2723177732953532, + "learning_rate": 7.530693270099343e-07, + "loss": 0.4656, + "step": 16381 + }, + { + "epoch": 0.6759924073615582, + "grad_norm": 2.307351439489029, + "learning_rate": 7.528954801740836e-07, + "loss": 0.5003, + "step": 16382 + }, + { + "epoch": 0.6760336717009161, + "grad_norm": 1.7709712816508214, + "learning_rate": 7.527216466833888e-07, + "loss": 0.4868, + "step": 16383 + }, + { + "epoch": 0.676074936040274, + "grad_norm": 3.2329482493606867, + "learning_rate": 7.525478265409547e-07, + "loss": 0.5245, + "step": 16384 + }, + { + "epoch": 0.6761162003796319, + "grad_norm": 11.740440240178945, + "learning_rate": 7.523740197498868e-07, + "loss": 0.5257, + "step": 16385 + }, + { + "epoch": 0.6761574647189899, + "grad_norm": 2.34697644297454, + "learning_rate": 7.522002263132885e-07, + "loss": 0.4699, + "step": 16386 + }, + { + "epoch": 0.6761987290583478, + "grad_norm": 2.235848356453836, + "learning_rate": 7.520264462342649e-07, + "loss": 0.5197, + "step": 16387 + }, + { + "epoch": 0.6762399933977057, + "grad_norm": 5.189230972293924, + "learning_rate": 7.518526795159203e-07, + "loss": 0.5495, + "step": 16388 + }, + { + "epoch": 0.6762812577370636, + "grad_norm": 2.3570462572506106, + "learning_rate": 7.516789261613591e-07, + "loss": 0.5758, + "step": 16389 + }, + { + "epoch": 0.6763225220764215, + "grad_norm": 2.8906565624495717, + "learning_rate": 7.515051861736834e-07, + "loss": 0.5092, + "step": 16390 + }, + { + "epoch": 0.6763637864157794, + "grad_norm": 2.7935355633969383, + "learning_rate": 7.513314595559975e-07, + "loss": 0.5031, + "step": 16391 + }, + { + "epoch": 0.6764050507551375, + "grad_norm": 2.2845377458052676, + "learning_rate": 7.511577463114049e-07, + "loss": 0.4765, + "step": 16392 + }, + { + "epoch": 0.6764463150944954, + "grad_norm": 5.084859205829934, + "learning_rate": 7.509840464430091e-07, + "loss": 0.4807, + "step": 16393 + }, + { + "epoch": 0.6764875794338533, + "grad_norm": 2.662173508314341, + "learning_rate": 7.50810359953911e-07, + "loss": 0.4856, + "step": 16394 + }, + { + "epoch": 0.6765288437732112, + "grad_norm": 5.061843021196516, + "learning_rate": 7.50636686847214e-07, + "loss": 0.5359, + "step": 16395 + }, + { + "epoch": 0.6765701081125691, + "grad_norm": 4.123649743492409, + "learning_rate": 7.504630271260206e-07, + "loss": 0.4806, + "step": 16396 + }, + { + "epoch": 0.676611372451927, + "grad_norm": 2.353470625573989, + "learning_rate": 7.502893807934329e-07, + "loss": 0.4674, + "step": 16397 + }, + { + "epoch": 0.676652636791285, + "grad_norm": 
4.097655850513183, + "learning_rate": 7.50115747852552e-07, + "loss": 0.5403, + "step": 16398 + }, + { + "epoch": 0.6766939011306429, + "grad_norm": 4.582559339599041, + "learning_rate": 7.499421283064795e-07, + "loss": 0.5132, + "step": 16399 + }, + { + "epoch": 0.6767351654700008, + "grad_norm": 1.928353304558418, + "learning_rate": 7.497685221583172e-07, + "loss": 0.4718, + "step": 16400 + }, + { + "epoch": 0.6767764298093587, + "grad_norm": 34.475839904894364, + "learning_rate": 7.495949294111662e-07, + "loss": 0.501, + "step": 16401 + }, + { + "epoch": 0.6768176941487167, + "grad_norm": 2.0244097835139523, + "learning_rate": 7.494213500681267e-07, + "loss": 0.5059, + "step": 16402 + }, + { + "epoch": 0.6768589584880746, + "grad_norm": 2.658550063920384, + "learning_rate": 7.492477841322993e-07, + "loss": 0.4904, + "step": 16403 + }, + { + "epoch": 0.6769002228274326, + "grad_norm": 2.4900415855006166, + "learning_rate": 7.490742316067847e-07, + "loss": 0.4742, + "step": 16404 + }, + { + "epoch": 0.6769414871667905, + "grad_norm": 2.58457827357091, + "learning_rate": 7.489006924946835e-07, + "loss": 0.5075, + "step": 16405 + }, + { + "epoch": 0.6769827515061484, + "grad_norm": 2.0056473672682054, + "learning_rate": 7.487271667990947e-07, + "loss": 0.4974, + "step": 16406 + }, + { + "epoch": 0.6770240158455063, + "grad_norm": 3.022441146490694, + "learning_rate": 7.485536545231176e-07, + "loss": 0.4256, + "step": 16407 + }, + { + "epoch": 0.6770652801848642, + "grad_norm": 3.8015425958039426, + "learning_rate": 7.483801556698521e-07, + "loss": 0.52, + "step": 16408 + }, + { + "epoch": 0.6771065445242221, + "grad_norm": 5.178884713097361, + "learning_rate": 7.482066702423973e-07, + "loss": 0.5904, + "step": 16409 + }, + { + "epoch": 0.6771478088635801, + "grad_norm": 5.185617057162637, + "learning_rate": 7.480331982438525e-07, + "loss": 0.5228, + "step": 16410 + }, + { + "epoch": 0.677189073202938, + "grad_norm": 9.90517909819701, + "learning_rate": 7.478597396773155e-07, + "loss": 0.6393, + "step": 16411 + }, + { + "epoch": 0.677230337542296, + "grad_norm": 2.7203020700023783, + "learning_rate": 7.476862945458852e-07, + "loss": 0.5451, + "step": 16412 + }, + { + "epoch": 0.6772716018816539, + "grad_norm": 9.798092545499394, + "learning_rate": 7.4751286285266e-07, + "loss": 0.506, + "step": 16413 + }, + { + "epoch": 0.6773128662210118, + "grad_norm": 3.0417239818701876, + "learning_rate": 7.47339444600737e-07, + "loss": 0.5649, + "step": 16414 + }, + { + "epoch": 0.6773541305603697, + "grad_norm": 4.106015052590562, + "learning_rate": 7.471660397932144e-07, + "loss": 0.4384, + "step": 16415 + }, + { + "epoch": 0.6773953948997277, + "grad_norm": 2.059258652849155, + "learning_rate": 7.469926484331897e-07, + "loss": 0.5095, + "step": 16416 + }, + { + "epoch": 0.6774366592390856, + "grad_norm": 2.9165581510599035, + "learning_rate": 7.468192705237606e-07, + "loss": 0.5054, + "step": 16417 + }, + { + "epoch": 0.6774779235784435, + "grad_norm": 7.173463242679833, + "learning_rate": 7.466459060680229e-07, + "loss": 0.5737, + "step": 16418 + }, + { + "epoch": 0.6775191879178014, + "grad_norm": 3.4924322330942026, + "learning_rate": 7.464725550690745e-07, + "loss": 0.538, + "step": 16419 + }, + { + "epoch": 0.6775604522571593, + "grad_norm": 5.03495707071725, + "learning_rate": 7.462992175300108e-07, + "loss": 0.4811, + "step": 16420 + }, + { + "epoch": 0.6776017165965172, + "grad_norm": 4.282023486226405, + "learning_rate": 7.461258934539285e-07, + "loss": 0.588, + "step": 16421 + }, + { + 
"epoch": 0.6776429809358753, + "grad_norm": 2.2179459617377093, + "learning_rate": 7.459525828439242e-07, + "loss": 0.4392, + "step": 16422 + }, + { + "epoch": 0.6776842452752332, + "grad_norm": 9.587818038011516, + "learning_rate": 7.457792857030923e-07, + "loss": 0.5132, + "step": 16423 + }, + { + "epoch": 0.6777255096145911, + "grad_norm": 3.1497096991481897, + "learning_rate": 7.456060020345296e-07, + "loss": 0.5068, + "step": 16424 + }, + { + "epoch": 0.677766773953949, + "grad_norm": 2.227140137026708, + "learning_rate": 7.454327318413305e-07, + "loss": 0.5626, + "step": 16425 + }, + { + "epoch": 0.6778080382933069, + "grad_norm": 2.4158371350865804, + "learning_rate": 7.452594751265912e-07, + "loss": 0.5218, + "step": 16426 + }, + { + "epoch": 0.6778493026326649, + "grad_norm": 3.376573888301465, + "learning_rate": 7.450862318934052e-07, + "loss": 0.5293, + "step": 16427 + }, + { + "epoch": 0.6778905669720228, + "grad_norm": 2.888675374830244, + "learning_rate": 7.449130021448675e-07, + "loss": 0.5613, + "step": 16428 + }, + { + "epoch": 0.6779318313113807, + "grad_norm": 3.601073605176832, + "learning_rate": 7.447397858840727e-07, + "loss": 0.5489, + "step": 16429 + }, + { + "epoch": 0.6779730956507386, + "grad_norm": 3.9376818176444703, + "learning_rate": 7.445665831141152e-07, + "loss": 0.5826, + "step": 16430 + }, + { + "epoch": 0.6780143599900965, + "grad_norm": 9.226873244836424, + "learning_rate": 7.443933938380876e-07, + "loss": 0.4971, + "step": 16431 + }, + { + "epoch": 0.6780556243294544, + "grad_norm": 3.3284598582624434, + "learning_rate": 7.442202180590849e-07, + "loss": 0.5332, + "step": 16432 + }, + { + "epoch": 0.6780968886688125, + "grad_norm": 6.29880352230135, + "learning_rate": 7.440470557801992e-07, + "loss": 0.491, + "step": 16433 + }, + { + "epoch": 0.6781381530081704, + "grad_norm": 2.4461258135933766, + "learning_rate": 7.438739070045247e-07, + "loss": 0.5016, + "step": 16434 + }, + { + "epoch": 0.6781794173475283, + "grad_norm": 1.8163537557875769, + "learning_rate": 7.437007717351534e-07, + "loss": 0.5183, + "step": 16435 + }, + { + "epoch": 0.6782206816868862, + "grad_norm": 2.96064353965122, + "learning_rate": 7.43527649975178e-07, + "loss": 0.5243, + "step": 16436 + }, + { + "epoch": 0.6782619460262441, + "grad_norm": 2.300552759961967, + "learning_rate": 7.433545417276913e-07, + "loss": 0.5348, + "step": 16437 + }, + { + "epoch": 0.678303210365602, + "grad_norm": 4.082592739181349, + "learning_rate": 7.43181446995786e-07, + "loss": 0.4818, + "step": 16438 + }, + { + "epoch": 0.67834447470496, + "grad_norm": 3.739487899346496, + "learning_rate": 7.430083657825526e-07, + "loss": 0.4985, + "step": 16439 + }, + { + "epoch": 0.6783857390443179, + "grad_norm": 2.0113491727674226, + "learning_rate": 7.428352980910837e-07, + "loss": 0.4987, + "step": 16440 + }, + { + "epoch": 0.6784270033836758, + "grad_norm": 2.2666959892587473, + "learning_rate": 7.426622439244703e-07, + "loss": 0.5575, + "step": 16441 + }, + { + "epoch": 0.6784682677230337, + "grad_norm": 3.916168934199157, + "learning_rate": 7.424892032858045e-07, + "loss": 0.5346, + "step": 16442 + }, + { + "epoch": 0.6785095320623917, + "grad_norm": 10.847578034628881, + "learning_rate": 7.423161761781758e-07, + "loss": 0.5319, + "step": 16443 + }, + { + "epoch": 0.6785507964017496, + "grad_norm": 3.032496152452866, + "learning_rate": 7.421431626046758e-07, + "loss": 0.5029, + "step": 16444 + }, + { + "epoch": 0.6785920607411076, + "grad_norm": 4.953274301503881, + "learning_rate": 
7.419701625683952e-07, + "loss": 0.4753, + "step": 16445 + }, + { + "epoch": 0.6786333250804655, + "grad_norm": 4.727269059148939, + "learning_rate": 7.417971760724233e-07, + "loss": 0.5154, + "step": 16446 + }, + { + "epoch": 0.6786745894198234, + "grad_norm": 2.9550730942646632, + "learning_rate": 7.416242031198513e-07, + "loss": 0.4857, + "step": 16447 + }, + { + "epoch": 0.6787158537591813, + "grad_norm": 4.192542026377243, + "learning_rate": 7.414512437137673e-07, + "loss": 0.4903, + "step": 16448 + }, + { + "epoch": 0.6787571180985392, + "grad_norm": 4.390768570759025, + "learning_rate": 7.412782978572619e-07, + "loss": 0.4968, + "step": 16449 + }, + { + "epoch": 0.6787983824378971, + "grad_norm": 2.6911220929219986, + "learning_rate": 7.411053655534241e-07, + "loss": 0.5127, + "step": 16450 + }, + { + "epoch": 0.6788396467772551, + "grad_norm": 3.367565257602259, + "learning_rate": 7.409324468053435e-07, + "loss": 0.5459, + "step": 16451 + }, + { + "epoch": 0.678880911116613, + "grad_norm": 3.6245274028991736, + "learning_rate": 7.407595416161078e-07, + "loss": 0.5119, + "step": 16452 + }, + { + "epoch": 0.678922175455971, + "grad_norm": 3.7169135008071956, + "learning_rate": 7.405866499888058e-07, + "loss": 0.5378, + "step": 16453 + }, + { + "epoch": 0.6789634397953289, + "grad_norm": 2.1300995650741803, + "learning_rate": 7.404137719265268e-07, + "loss": 0.5207, + "step": 16454 + }, + { + "epoch": 0.6790047041346868, + "grad_norm": 3.017778768202165, + "learning_rate": 7.402409074323572e-07, + "loss": 0.5666, + "step": 16455 + }, + { + "epoch": 0.6790459684740447, + "grad_norm": 2.9323315147630393, + "learning_rate": 7.400680565093858e-07, + "loss": 0.5465, + "step": 16456 + }, + { + "epoch": 0.6790872328134027, + "grad_norm": 4.490720877254102, + "learning_rate": 7.398952191607001e-07, + "loss": 0.5593, + "step": 16457 + }, + { + "epoch": 0.6791284971527606, + "grad_norm": 4.778700451530274, + "learning_rate": 7.397223953893876e-07, + "loss": 0.4816, + "step": 16458 + }, + { + "epoch": 0.6791697614921185, + "grad_norm": 2.5908351093967887, + "learning_rate": 7.395495851985352e-07, + "loss": 0.5086, + "step": 16459 + }, + { + "epoch": 0.6792110258314764, + "grad_norm": 3.4112858017476184, + "learning_rate": 7.393767885912288e-07, + "loss": 0.4961, + "step": 16460 + }, + { + "epoch": 0.6792522901708343, + "grad_norm": 13.0238779450004, + "learning_rate": 7.392040055705558e-07, + "loss": 0.5091, + "step": 16461 + }, + { + "epoch": 0.6792935545101922, + "grad_norm": 3.1099232025839973, + "learning_rate": 7.390312361396026e-07, + "loss": 0.492, + "step": 16462 + }, + { + "epoch": 0.6793348188495503, + "grad_norm": 3.9462782513525165, + "learning_rate": 7.388584803014556e-07, + "loss": 0.5354, + "step": 16463 + }, + { + "epoch": 0.6793760831889082, + "grad_norm": 2.780090189630854, + "learning_rate": 7.386857380591999e-07, + "loss": 0.5102, + "step": 16464 + }, + { + "epoch": 0.6794173475282661, + "grad_norm": 12.026663369728842, + "learning_rate": 7.385130094159211e-07, + "loss": 0.5253, + "step": 16465 + }, + { + "epoch": 0.679458611867624, + "grad_norm": 6.238732738325834, + "learning_rate": 7.383402943747052e-07, + "loss": 0.5067, + "step": 16466 + }, + { + "epoch": 0.6794998762069819, + "grad_norm": 3.342184249113044, + "learning_rate": 7.381675929386376e-07, + "loss": 0.5272, + "step": 16467 + }, + { + "epoch": 0.6795411405463399, + "grad_norm": 2.104384695436514, + "learning_rate": 7.379949051108018e-07, + "loss": 0.484, + "step": 16468 + }, + { + "epoch": 0.6795824048856978, + 
"grad_norm": 7.511802079921577, + "learning_rate": 7.378222308942833e-07, + "loss": 0.5547, + "step": 16469 + }, + { + "epoch": 0.6796236692250557, + "grad_norm": 2.6736869918553037, + "learning_rate": 7.376495702921665e-07, + "loss": 0.6391, + "step": 16470 + }, + { + "epoch": 0.6796649335644136, + "grad_norm": 5.111130161357433, + "learning_rate": 7.374769233075367e-07, + "loss": 0.5338, + "step": 16471 + }, + { + "epoch": 0.6797061979037715, + "grad_norm": 3.525537608812371, + "learning_rate": 7.373042899434752e-07, + "loss": 0.5362, + "step": 16472 + }, + { + "epoch": 0.6797474622431295, + "grad_norm": 2.190929384773673, + "learning_rate": 7.371316702030674e-07, + "loss": 0.4987, + "step": 16473 + }, + { + "epoch": 0.6797887265824875, + "grad_norm": 2.854998481275573, + "learning_rate": 7.36959064089396e-07, + "loss": 0.5588, + "step": 16474 + }, + { + "epoch": 0.6798299909218454, + "grad_norm": 7.299935024283789, + "learning_rate": 7.367864716055453e-07, + "loss": 0.5379, + "step": 16475 + }, + { + "epoch": 0.6798712552612033, + "grad_norm": 3.1098538408398517, + "learning_rate": 7.36613892754597e-07, + "loss": 0.4884, + "step": 16476 + }, + { + "epoch": 0.6799125196005612, + "grad_norm": 2.8104233386468565, + "learning_rate": 7.364413275396341e-07, + "loss": 0.5163, + "step": 16477 + }, + { + "epoch": 0.6799537839399191, + "grad_norm": 2.5552347680250262, + "learning_rate": 7.362687759637393e-07, + "loss": 0.5416, + "step": 16478 + }, + { + "epoch": 0.679995048279277, + "grad_norm": 3.5725640344135723, + "learning_rate": 7.360962380299954e-07, + "loss": 0.4975, + "step": 16479 + }, + { + "epoch": 0.680036312618635, + "grad_norm": 3.447614995288328, + "learning_rate": 7.359237137414831e-07, + "loss": 0.4699, + "step": 16480 + }, + { + "epoch": 0.6800775769579929, + "grad_norm": 2.1683645177284285, + "learning_rate": 7.357512031012845e-07, + "loss": 0.5178, + "step": 16481 + }, + { + "epoch": 0.6801188412973508, + "grad_norm": 5.103068699944017, + "learning_rate": 7.355787061124814e-07, + "loss": 0.4946, + "step": 16482 + }, + { + "epoch": 0.6801601056367088, + "grad_norm": 6.731158235574007, + "learning_rate": 7.354062227781555e-07, + "loss": 0.5277, + "step": 16483 + }, + { + "epoch": 0.6802013699760667, + "grad_norm": 3.1530871804593295, + "learning_rate": 7.352337531013866e-07, + "loss": 0.4818, + "step": 16484 + }, + { + "epoch": 0.6802426343154246, + "grad_norm": 2.8470476360972667, + "learning_rate": 7.350612970852566e-07, + "loss": 0.4851, + "step": 16485 + }, + { + "epoch": 0.6802838986547826, + "grad_norm": 5.493995916138794, + "learning_rate": 7.348888547328447e-07, + "loss": 0.5269, + "step": 16486 + }, + { + "epoch": 0.6803251629941405, + "grad_norm": 20.044423412595208, + "learning_rate": 7.347164260472319e-07, + "loss": 0.5118, + "step": 16487 + }, + { + "epoch": 0.6803664273334984, + "grad_norm": 2.6628480909758747, + "learning_rate": 7.345440110314989e-07, + "loss": 0.493, + "step": 16488 + }, + { + "epoch": 0.6804076916728563, + "grad_norm": 2.976255602904793, + "learning_rate": 7.343716096887242e-07, + "loss": 0.5168, + "step": 16489 + }, + { + "epoch": 0.6804489560122142, + "grad_norm": 14.566496144825289, + "learning_rate": 7.341992220219877e-07, + "loss": 0.523, + "step": 16490 + }, + { + "epoch": 0.6804902203515721, + "grad_norm": 4.199537501093714, + "learning_rate": 7.340268480343695e-07, + "loss": 0.4889, + "step": 16491 + }, + { + "epoch": 0.68053148469093, + "grad_norm": 3.1400942662496476, + "learning_rate": 7.338544877289473e-07, + "loss": 0.488, + "step": 
16492 + }, + { + "epoch": 0.680572749030288, + "grad_norm": 3.0063301201776538, + "learning_rate": 7.336821411088006e-07, + "loss": 0.4846, + "step": 16493 + }, + { + "epoch": 0.680614013369646, + "grad_norm": 7.3605966200715, + "learning_rate": 7.335098081770079e-07, + "loss": 0.5577, + "step": 16494 + }, + { + "epoch": 0.6806552777090039, + "grad_norm": 2.4881778201281244, + "learning_rate": 7.33337488936648e-07, + "loss": 0.4939, + "step": 16495 + }, + { + "epoch": 0.6806965420483618, + "grad_norm": 2.4940378824216918, + "learning_rate": 7.331651833907981e-07, + "loss": 0.5471, + "step": 16496 + }, + { + "epoch": 0.6807378063877197, + "grad_norm": 2.336251380181755, + "learning_rate": 7.329928915425361e-07, + "loss": 0.5183, + "step": 16497 + }, + { + "epoch": 0.6807790707270777, + "grad_norm": 8.00442334417925, + "learning_rate": 7.328206133949406e-07, + "loss": 0.5023, + "step": 16498 + }, + { + "epoch": 0.6808203350664356, + "grad_norm": 3.0816557074257607, + "learning_rate": 7.326483489510874e-07, + "loss": 0.4717, + "step": 16499 + }, + { + "epoch": 0.6808615994057935, + "grad_norm": 2.299657202886824, + "learning_rate": 7.324760982140548e-07, + "loss": 0.5193, + "step": 16500 + }, + { + "epoch": 0.6809028637451514, + "grad_norm": 2.3314965436339685, + "learning_rate": 7.323038611869187e-07, + "loss": 0.5103, + "step": 16501 + }, + { + "epoch": 0.6809441280845093, + "grad_norm": 3.848047978293363, + "learning_rate": 7.321316378727561e-07, + "loss": 0.4466, + "step": 16502 + }, + { + "epoch": 0.6809853924238672, + "grad_norm": 3.048854111912794, + "learning_rate": 7.319594282746434e-07, + "loss": 0.4924, + "step": 16503 + }, + { + "epoch": 0.6810266567632253, + "grad_norm": 3.1953986612654868, + "learning_rate": 7.317872323956571e-07, + "loss": 0.5381, + "step": 16504 + }, + { + "epoch": 0.6810679211025832, + "grad_norm": 2.9318895873453608, + "learning_rate": 7.316150502388722e-07, + "loss": 0.5011, + "step": 16505 + }, + { + "epoch": 0.6811091854419411, + "grad_norm": 2.5884428040260095, + "learning_rate": 7.314428818073646e-07, + "loss": 0.4574, + "step": 16506 + }, + { + "epoch": 0.681150449781299, + "grad_norm": 2.234007011332722, + "learning_rate": 7.312707271042097e-07, + "loss": 0.5027, + "step": 16507 + }, + { + "epoch": 0.6811917141206569, + "grad_norm": 2.619930826685085, + "learning_rate": 7.310985861324834e-07, + "loss": 0.5244, + "step": 16508 + }, + { + "epoch": 0.6812329784600148, + "grad_norm": 5.7976848822340985, + "learning_rate": 7.309264588952592e-07, + "loss": 0.5048, + "step": 16509 + }, + { + "epoch": 0.6812742427993728, + "grad_norm": 2.310989674072661, + "learning_rate": 7.307543453956125e-07, + "loss": 0.526, + "step": 16510 + }, + { + "epoch": 0.6813155071387307, + "grad_norm": 4.238829788404593, + "learning_rate": 7.30582245636618e-07, + "loss": 0.5005, + "step": 16511 + }, + { + "epoch": 0.6813567714780886, + "grad_norm": 5.911770336318621, + "learning_rate": 7.304101596213494e-07, + "loss": 0.4608, + "step": 16512 + }, + { + "epoch": 0.6813980358174465, + "grad_norm": 18.22294301117317, + "learning_rate": 7.3023808735288e-07, + "loss": 0.531, + "step": 16513 + }, + { + "epoch": 0.6814393001568045, + "grad_norm": 5.337331655316868, + "learning_rate": 7.30066028834284e-07, + "loss": 0.5476, + "step": 16514 + }, + { + "epoch": 0.6814805644961625, + "grad_norm": 3.8870962065844115, + "learning_rate": 7.298939840686346e-07, + "loss": 0.5148, + "step": 16515 + }, + { + "epoch": 0.6815218288355204, + "grad_norm": 4.327466358253129, + "learning_rate": 
7.297219530590061e-07, + "loss": 0.5525, + "step": 16516 + }, + { + "epoch": 0.6815630931748783, + "grad_norm": 1.8178454889819802, + "learning_rate": 7.295499358084697e-07, + "loss": 0.4634, + "step": 16517 + }, + { + "epoch": 0.6816043575142362, + "grad_norm": 2.625652752805894, + "learning_rate": 7.293779323200989e-07, + "loss": 0.5265, + "step": 16518 + }, + { + "epoch": 0.6816456218535941, + "grad_norm": 3.32924444893127, + "learning_rate": 7.292059425969658e-07, + "loss": 0.5162, + "step": 16519 + }, + { + "epoch": 0.681686886192952, + "grad_norm": 6.172890785943666, + "learning_rate": 7.290339666421436e-07, + "loss": 0.4806, + "step": 16520 + }, + { + "epoch": 0.68172815053231, + "grad_norm": 2.454595180089721, + "learning_rate": 7.288620044587028e-07, + "loss": 0.5023, + "step": 16521 + }, + { + "epoch": 0.6817694148716679, + "grad_norm": 4.957002817161758, + "learning_rate": 7.286900560497157e-07, + "loss": 0.46, + "step": 16522 + }, + { + "epoch": 0.6818106792110258, + "grad_norm": 6.851762612043218, + "learning_rate": 7.285181214182537e-07, + "loss": 0.5381, + "step": 16523 + }, + { + "epoch": 0.6818519435503838, + "grad_norm": 14.751262362635888, + "learning_rate": 7.283462005673886e-07, + "loss": 0.5084, + "step": 16524 + }, + { + "epoch": 0.6818932078897417, + "grad_norm": 3.197469060574264, + "learning_rate": 7.281742935001906e-07, + "loss": 0.5513, + "step": 16525 + }, + { + "epoch": 0.6819344722290996, + "grad_norm": 2.445616575029952, + "learning_rate": 7.280024002197301e-07, + "loss": 0.4557, + "step": 16526 + }, + { + "epoch": 0.6819757365684576, + "grad_norm": 2.9383530612097917, + "learning_rate": 7.27830520729078e-07, + "loss": 0.5216, + "step": 16527 + }, + { + "epoch": 0.6820170009078155, + "grad_norm": 22.053121416785256, + "learning_rate": 7.276586550313043e-07, + "loss": 0.5171, + "step": 16528 + }, + { + "epoch": 0.6820582652471734, + "grad_norm": 3.479055810720278, + "learning_rate": 7.274868031294798e-07, + "loss": 0.5071, + "step": 16529 + }, + { + "epoch": 0.6820995295865313, + "grad_norm": 15.172126186382558, + "learning_rate": 7.27314965026673e-07, + "loss": 0.5061, + "step": 16530 + }, + { + "epoch": 0.6821407939258892, + "grad_norm": 2.9103677457488675, + "learning_rate": 7.271431407259539e-07, + "loss": 0.4588, + "step": 16531 + }, + { + "epoch": 0.6821820582652471, + "grad_norm": 2.3413405891759127, + "learning_rate": 7.269713302303922e-07, + "loss": 0.5206, + "step": 16532 + }, + { + "epoch": 0.682223322604605, + "grad_norm": 9.66488858366463, + "learning_rate": 7.267995335430559e-07, + "loss": 0.4965, + "step": 16533 + }, + { + "epoch": 0.6822645869439631, + "grad_norm": 7.28602577718937, + "learning_rate": 7.266277506670141e-07, + "loss": 0.4813, + "step": 16534 + }, + { + "epoch": 0.682305851283321, + "grad_norm": 1.7181838550077946, + "learning_rate": 7.264559816053353e-07, + "loss": 0.4545, + "step": 16535 + }, + { + "epoch": 0.6823471156226789, + "grad_norm": 2.289807637438539, + "learning_rate": 7.262842263610884e-07, + "loss": 0.5092, + "step": 16536 + }, + { + "epoch": 0.6823883799620368, + "grad_norm": 8.816748855856837, + "learning_rate": 7.261124849373407e-07, + "loss": 0.538, + "step": 16537 + }, + { + "epoch": 0.6824296443013947, + "grad_norm": 3.9183286259619226, + "learning_rate": 7.259407573371594e-07, + "loss": 0.5412, + "step": 16538 + }, + { + "epoch": 0.6824709086407527, + "grad_norm": 3.089604740760779, + "learning_rate": 7.257690435636127e-07, + "loss": 0.5038, + "step": 16539 + }, + { + "epoch": 0.6825121729801106, + 
"grad_norm": 2.639916110027206, + "learning_rate": 7.255973436197677e-07, + "loss": 0.5146, + "step": 16540 + }, + { + "epoch": 0.6825534373194685, + "grad_norm": 3.507552388218188, + "learning_rate": 7.254256575086919e-07, + "loss": 0.5171, + "step": 16541 + }, + { + "epoch": 0.6825947016588264, + "grad_norm": 2.4537169690641156, + "learning_rate": 7.25253985233451e-07, + "loss": 0.5039, + "step": 16542 + }, + { + "epoch": 0.6826359659981843, + "grad_norm": 3.2278379790057916, + "learning_rate": 7.250823267971121e-07, + "loss": 0.5055, + "step": 16543 + }, + { + "epoch": 0.6826772303375424, + "grad_norm": 2.7936530086137807, + "learning_rate": 7.249106822027413e-07, + "loss": 0.4887, + "step": 16544 + }, + { + "epoch": 0.6827184946769003, + "grad_norm": 2.651947942827021, + "learning_rate": 7.247390514534053e-07, + "loss": 0.5382, + "step": 16545 + }, + { + "epoch": 0.6827597590162582, + "grad_norm": 18.0541465555452, + "learning_rate": 7.245674345521686e-07, + "loss": 0.5102, + "step": 16546 + }, + { + "epoch": 0.6828010233556161, + "grad_norm": 4.6519917587268615, + "learning_rate": 7.243958315020974e-07, + "loss": 0.5083, + "step": 16547 + }, + { + "epoch": 0.682842287694974, + "grad_norm": 3.3080930910329607, + "learning_rate": 7.242242423062571e-07, + "loss": 0.5179, + "step": 16548 + }, + { + "epoch": 0.6828835520343319, + "grad_norm": 2.169796693979031, + "learning_rate": 7.240526669677129e-07, + "loss": 0.4971, + "step": 16549 + }, + { + "epoch": 0.6829248163736898, + "grad_norm": 3.4587365855391377, + "learning_rate": 7.238811054895292e-07, + "loss": 0.5386, + "step": 16550 + }, + { + "epoch": 0.6829660807130478, + "grad_norm": 3.3776796856788467, + "learning_rate": 7.237095578747701e-07, + "loss": 0.4969, + "step": 16551 + }, + { + "epoch": 0.6830073450524057, + "grad_norm": 10.60866294329079, + "learning_rate": 7.235380241265e-07, + "loss": 0.5489, + "step": 16552 + }, + { + "epoch": 0.6830486093917636, + "grad_norm": 2.400349700564593, + "learning_rate": 7.233665042477841e-07, + "loss": 0.5732, + "step": 16553 + }, + { + "epoch": 0.6830898737311215, + "grad_norm": 10.384275222576663, + "learning_rate": 7.231949982416844e-07, + "loss": 0.5582, + "step": 16554 + }, + { + "epoch": 0.6831311380704795, + "grad_norm": 3.267892796376507, + "learning_rate": 7.230235061112654e-07, + "loss": 0.5251, + "step": 16555 + }, + { + "epoch": 0.6831724024098375, + "grad_norm": 2.7708632309248764, + "learning_rate": 7.228520278595902e-07, + "loss": 0.54, + "step": 16556 + }, + { + "epoch": 0.6832136667491954, + "grad_norm": 4.19560153930522, + "learning_rate": 7.226805634897228e-07, + "loss": 0.4937, + "step": 16557 + }, + { + "epoch": 0.6832549310885533, + "grad_norm": 23.050558825976267, + "learning_rate": 7.225091130047242e-07, + "loss": 0.5454, + "step": 16558 + }, + { + "epoch": 0.6832961954279112, + "grad_norm": 3.313623308810371, + "learning_rate": 7.223376764076579e-07, + "loss": 0.4902, + "step": 16559 + }, + { + "epoch": 0.6833374597672691, + "grad_norm": 2.2981989342672713, + "learning_rate": 7.221662537015862e-07, + "loss": 0.5442, + "step": 16560 + }, + { + "epoch": 0.683378724106627, + "grad_norm": 3.332022834663276, + "learning_rate": 7.219948448895715e-07, + "loss": 0.5436, + "step": 16561 + }, + { + "epoch": 0.683419988445985, + "grad_norm": 13.580765987120857, + "learning_rate": 7.218234499746747e-07, + "loss": 0.5636, + "step": 16562 + }, + { + "epoch": 0.6834612527853429, + "grad_norm": 2.8890283709052578, + "learning_rate": 7.216520689599582e-07, + "loss": 0.501, + "step": 
16563 + }, + { + "epoch": 0.6835025171247008, + "grad_norm": 7.382987862398876, + "learning_rate": 7.214807018484824e-07, + "loss": 0.5449, + "step": 16564 + }, + { + "epoch": 0.6835437814640588, + "grad_norm": 1.9241846110614165, + "learning_rate": 7.213093486433088e-07, + "loss": 0.5037, + "step": 16565 + }, + { + "epoch": 0.6835850458034167, + "grad_norm": 10.540752938086158, + "learning_rate": 7.211380093474987e-07, + "loss": 0.5688, + "step": 16566 + }, + { + "epoch": 0.6836263101427746, + "grad_norm": 3.556304688029821, + "learning_rate": 7.209666839641118e-07, + "loss": 0.5199, + "step": 16567 + }, + { + "epoch": 0.6836675744821326, + "grad_norm": 2.260552378029069, + "learning_rate": 7.207953724962087e-07, + "loss": 0.4814, + "step": 16568 + }, + { + "epoch": 0.6837088388214905, + "grad_norm": 4.1522223727205745, + "learning_rate": 7.2062407494685e-07, + "loss": 0.5217, + "step": 16569 + }, + { + "epoch": 0.6837501031608484, + "grad_norm": 3.3651310992705357, + "learning_rate": 7.204527913190947e-07, + "loss": 0.5256, + "step": 16570 + }, + { + "epoch": 0.6837913675002063, + "grad_norm": 2.5801582558946747, + "learning_rate": 7.202815216160024e-07, + "loss": 0.554, + "step": 16571 + }, + { + "epoch": 0.6838326318395642, + "grad_norm": 2.21677711152501, + "learning_rate": 7.201102658406327e-07, + "loss": 0.5523, + "step": 16572 + }, + { + "epoch": 0.6838738961789221, + "grad_norm": 2.745335889711262, + "learning_rate": 7.199390239960453e-07, + "loss": 0.5273, + "step": 16573 + }, + { + "epoch": 0.68391516051828, + "grad_norm": 6.029434634813341, + "learning_rate": 7.197677960852977e-07, + "loss": 0.496, + "step": 16574 + }, + { + "epoch": 0.6839564248576381, + "grad_norm": 2.857328874255133, + "learning_rate": 7.195965821114492e-07, + "loss": 0.5303, + "step": 16575 + }, + { + "epoch": 0.683997689196996, + "grad_norm": 2.984369676745144, + "learning_rate": 7.194253820775579e-07, + "loss": 0.5363, + "step": 16576 + }, + { + "epoch": 0.6840389535363539, + "grad_norm": 3.386621059093279, + "learning_rate": 7.192541959866826e-07, + "loss": 0.4973, + "step": 16577 + }, + { + "epoch": 0.6840802178757118, + "grad_norm": 2.884055357351444, + "learning_rate": 7.190830238418805e-07, + "loss": 0.4869, + "step": 16578 + }, + { + "epoch": 0.6841214822150697, + "grad_norm": 2.1800023713977223, + "learning_rate": 7.189118656462085e-07, + "loss": 0.4709, + "step": 16579 + }, + { + "epoch": 0.6841627465544277, + "grad_norm": 4.033960854547714, + "learning_rate": 7.187407214027248e-07, + "loss": 0.522, + "step": 16580 + }, + { + "epoch": 0.6842040108937856, + "grad_norm": 2.2531307574048767, + "learning_rate": 7.18569591114486e-07, + "loss": 0.4987, + "step": 16581 + }, + { + "epoch": 0.6842452752331435, + "grad_norm": 8.877071770765992, + "learning_rate": 7.183984747845497e-07, + "loss": 0.5416, + "step": 16582 + }, + { + "epoch": 0.6842865395725014, + "grad_norm": 2.782094273610501, + "learning_rate": 7.182273724159716e-07, + "loss": 0.5888, + "step": 16583 + }, + { + "epoch": 0.6843278039118593, + "grad_norm": 2.9025200658489547, + "learning_rate": 7.180562840118079e-07, + "loss": 0.5443, + "step": 16584 + }, + { + "epoch": 0.6843690682512173, + "grad_norm": 6.283703369113721, + "learning_rate": 7.178852095751154e-07, + "loss": 0.5129, + "step": 16585 + }, + { + "epoch": 0.6844103325905753, + "grad_norm": 2.079193986437291, + "learning_rate": 7.177141491089502e-07, + "loss": 0.4797, + "step": 16586 + }, + { + "epoch": 0.6844515969299332, + "grad_norm": 4.18869481093881, + "learning_rate": 
7.175431026163665e-07, + "loss": 0.5693, + "step": 16587 + }, + { + "epoch": 0.6844928612692911, + "grad_norm": 2.384476104957109, + "learning_rate": 7.173720701004205e-07, + "loss": 0.4687, + "step": 16588 + }, + { + "epoch": 0.684534125608649, + "grad_norm": 6.034275263538459, + "learning_rate": 7.172010515641672e-07, + "loss": 0.4784, + "step": 16589 + }, + { + "epoch": 0.6845753899480069, + "grad_norm": 2.3320882227104303, + "learning_rate": 7.170300470106626e-07, + "loss": 0.5536, + "step": 16590 + }, + { + "epoch": 0.6846166542873648, + "grad_norm": 4.785485166991143, + "learning_rate": 7.168590564429587e-07, + "loss": 0.4934, + "step": 16591 + }, + { + "epoch": 0.6846579186267228, + "grad_norm": 2.102828409456198, + "learning_rate": 7.166880798641114e-07, + "loss": 0.5649, + "step": 16592 + }, + { + "epoch": 0.6846991829660807, + "grad_norm": 4.083057114521644, + "learning_rate": 7.165171172771744e-07, + "loss": 0.5269, + "step": 16593 + }, + { + "epoch": 0.6847404473054386, + "grad_norm": 33.77092381145895, + "learning_rate": 7.163461686852021e-07, + "loss": 0.52, + "step": 16594 + }, + { + "epoch": 0.6847817116447966, + "grad_norm": 3.6377594207839135, + "learning_rate": 7.161752340912473e-07, + "loss": 0.5011, + "step": 16595 + }, + { + "epoch": 0.6848229759841545, + "grad_norm": 2.4284238239276696, + "learning_rate": 7.160043134983635e-07, + "loss": 0.5741, + "step": 16596 + }, + { + "epoch": 0.6848642403235125, + "grad_norm": 2.2424259739452994, + "learning_rate": 7.15833406909604e-07, + "loss": 0.5457, + "step": 16597 + }, + { + "epoch": 0.6849055046628704, + "grad_norm": 2.5106481068547266, + "learning_rate": 7.15662514328022e-07, + "loss": 0.5157, + "step": 16598 + }, + { + "epoch": 0.6849467690022283, + "grad_norm": 2.157036503028343, + "learning_rate": 7.154916357566692e-07, + "loss": 0.4486, + "step": 16599 + }, + { + "epoch": 0.6849880333415862, + "grad_norm": 2.458545783432109, + "learning_rate": 7.153207711985981e-07, + "loss": 0.4812, + "step": 16600 + }, + { + "epoch": 0.6850292976809441, + "grad_norm": 7.140560314225437, + "learning_rate": 7.151499206568613e-07, + "loss": 0.5293, + "step": 16601 + }, + { + "epoch": 0.685070562020302, + "grad_norm": 2.3328110036756002, + "learning_rate": 7.149790841345106e-07, + "loss": 0.5159, + "step": 16602 + }, + { + "epoch": 0.68511182635966, + "grad_norm": 3.1602594959548838, + "learning_rate": 7.148082616345973e-07, + "loss": 0.4765, + "step": 16603 + }, + { + "epoch": 0.6851530906990179, + "grad_norm": 2.486281927356425, + "learning_rate": 7.146374531601724e-07, + "loss": 0.5303, + "step": 16604 + }, + { + "epoch": 0.6851943550383759, + "grad_norm": 3.7281616658559056, + "learning_rate": 7.14466658714287e-07, + "loss": 0.5076, + "step": 16605 + }, + { + "epoch": 0.6852356193777338, + "grad_norm": 3.3759708173208804, + "learning_rate": 7.142958782999923e-07, + "loss": 0.5574, + "step": 16606 + }, + { + "epoch": 0.6852768837170917, + "grad_norm": 3.709388290686832, + "learning_rate": 7.141251119203393e-07, + "loss": 0.5199, + "step": 16607 + }, + { + "epoch": 0.6853181480564496, + "grad_norm": 6.792383353086842, + "learning_rate": 7.139543595783772e-07, + "loss": 0.4727, + "step": 16608 + }, + { + "epoch": 0.6853594123958076, + "grad_norm": 3.450572264725482, + "learning_rate": 7.137836212771565e-07, + "loss": 0.5093, + "step": 16609 + }, + { + "epoch": 0.6854006767351655, + "grad_norm": 5.774347513797664, + "learning_rate": 7.136128970197277e-07, + "loss": 0.4881, + "step": 16610 + }, + { + "epoch": 0.6854419410745234, + 
"grad_norm": 2.1242024618304503, + "learning_rate": 7.134421868091394e-07, + "loss": 0.5045, + "step": 16611 + }, + { + "epoch": 0.6854832054138813, + "grad_norm": 3.116901751036177, + "learning_rate": 7.132714906484412e-07, + "loss": 0.5008, + "step": 16612 + }, + { + "epoch": 0.6855244697532392, + "grad_norm": 3.9339156266690285, + "learning_rate": 7.131008085406824e-07, + "loss": 0.5094, + "step": 16613 + }, + { + "epoch": 0.6855657340925971, + "grad_norm": 3.838257874344673, + "learning_rate": 7.12930140488912e-07, + "loss": 0.4796, + "step": 16614 + }, + { + "epoch": 0.685606998431955, + "grad_norm": 4.457043973646775, + "learning_rate": 7.127594864961779e-07, + "loss": 0.5245, + "step": 16615 + }, + { + "epoch": 0.6856482627713131, + "grad_norm": 2.612202880857009, + "learning_rate": 7.125888465655291e-07, + "loss": 0.5437, + "step": 16616 + }, + { + "epoch": 0.685689527110671, + "grad_norm": 2.7961949316142327, + "learning_rate": 7.12418220700013e-07, + "loss": 0.4534, + "step": 16617 + }, + { + "epoch": 0.6857307914500289, + "grad_norm": 2.7717706637082844, + "learning_rate": 7.122476089026776e-07, + "loss": 0.5461, + "step": 16618 + }, + { + "epoch": 0.6857720557893868, + "grad_norm": 3.098234374171535, + "learning_rate": 7.12077011176571e-07, + "loss": 0.5769, + "step": 16619 + }, + { + "epoch": 0.6858133201287447, + "grad_norm": 2.1464435779216693, + "learning_rate": 7.119064275247397e-07, + "loss": 0.525, + "step": 16620 + }, + { + "epoch": 0.6858545844681027, + "grad_norm": 3.4153261521844174, + "learning_rate": 7.11735857950231e-07, + "loss": 0.5074, + "step": 16621 + }, + { + "epoch": 0.6858958488074606, + "grad_norm": 3.4678861670291967, + "learning_rate": 7.115653024560917e-07, + "loss": 0.5088, + "step": 16622 + }, + { + "epoch": 0.6859371131468185, + "grad_norm": 9.322073440186339, + "learning_rate": 7.113947610453691e-07, + "loss": 0.5402, + "step": 16623 + }, + { + "epoch": 0.6859783774861764, + "grad_norm": 2.9330543449714646, + "learning_rate": 7.112242337211085e-07, + "loss": 0.517, + "step": 16624 + }, + { + "epoch": 0.6860196418255343, + "grad_norm": 5.810047353559213, + "learning_rate": 7.110537204863559e-07, + "loss": 0.4455, + "step": 16625 + }, + { + "epoch": 0.6860609061648923, + "grad_norm": 1.9274802932887147, + "learning_rate": 7.108832213441578e-07, + "loss": 0.476, + "step": 16626 + }, + { + "epoch": 0.6861021705042503, + "grad_norm": 2.87474004992988, + "learning_rate": 7.1071273629756e-07, + "loss": 0.479, + "step": 16627 + }, + { + "epoch": 0.6861434348436082, + "grad_norm": 3.534806936522572, + "learning_rate": 7.105422653496065e-07, + "loss": 0.516, + "step": 16628 + }, + { + "epoch": 0.6861846991829661, + "grad_norm": 2.5395372644986907, + "learning_rate": 7.103718085033437e-07, + "loss": 0.4473, + "step": 16629 + }, + { + "epoch": 0.686225963522324, + "grad_norm": 2.95905687862905, + "learning_rate": 7.102013657618153e-07, + "loss": 0.5537, + "step": 16630 + }, + { + "epoch": 0.6862672278616819, + "grad_norm": 7.658574059335029, + "learning_rate": 7.100309371280668e-07, + "loss": 0.5853, + "step": 16631 + }, + { + "epoch": 0.6863084922010398, + "grad_norm": 2.920079359261253, + "learning_rate": 7.098605226051412e-07, + "loss": 0.5436, + "step": 16632 + }, + { + "epoch": 0.6863497565403978, + "grad_norm": 4.56041549857123, + "learning_rate": 7.096901221960835e-07, + "loss": 0.4827, + "step": 16633 + }, + { + "epoch": 0.6863910208797557, + "grad_norm": 5.193501370348299, + "learning_rate": 7.095197359039371e-07, + "loss": 0.4838, + "step": 16634 + 
}, + { + "epoch": 0.6864322852191136, + "grad_norm": 5.101314960307764, + "learning_rate": 7.093493637317464e-07, + "loss": 0.5648, + "step": 16635 + }, + { + "epoch": 0.6864735495584716, + "grad_norm": 2.5036583721841135, + "learning_rate": 7.091790056825533e-07, + "loss": 0.4877, + "step": 16636 + }, + { + "epoch": 0.6865148138978295, + "grad_norm": 3.1311951473459114, + "learning_rate": 7.090086617594017e-07, + "loss": 0.5547, + "step": 16637 + }, + { + "epoch": 0.6865560782371875, + "grad_norm": 4.684640577228099, + "learning_rate": 7.088383319653339e-07, + "loss": 0.5225, + "step": 16638 + }, + { + "epoch": 0.6865973425765454, + "grad_norm": 2.3821799002032273, + "learning_rate": 7.086680163033934e-07, + "loss": 0.5434, + "step": 16639 + }, + { + "epoch": 0.6866386069159033, + "grad_norm": 2.7205711916998747, + "learning_rate": 7.08497714776621e-07, + "loss": 0.519, + "step": 16640 + }, + { + "epoch": 0.6866798712552612, + "grad_norm": 2.9293132853426034, + "learning_rate": 7.083274273880597e-07, + "loss": 0.5355, + "step": 16641 + }, + { + "epoch": 0.6867211355946191, + "grad_norm": 4.215892368955524, + "learning_rate": 7.081571541407514e-07, + "loss": 0.5225, + "step": 16642 + }, + { + "epoch": 0.686762399933977, + "grad_norm": 2.204408744105485, + "learning_rate": 7.079868950377367e-07, + "loss": 0.5348, + "step": 16643 + }, + { + "epoch": 0.686803664273335, + "grad_norm": 5.261052745369977, + "learning_rate": 7.07816650082058e-07, + "loss": 0.5036, + "step": 16644 + }, + { + "epoch": 0.6868449286126929, + "grad_norm": 2.802634000644956, + "learning_rate": 7.076464192767549e-07, + "loss": 0.587, + "step": 16645 + }, + { + "epoch": 0.6868861929520509, + "grad_norm": 5.447570177171041, + "learning_rate": 7.074762026248691e-07, + "loss": 0.5232, + "step": 16646 + }, + { + "epoch": 0.6869274572914088, + "grad_norm": 1.8685672576546364, + "learning_rate": 7.073060001294415e-07, + "loss": 0.4454, + "step": 16647 + }, + { + "epoch": 0.6869687216307667, + "grad_norm": 5.033179287728966, + "learning_rate": 7.07135811793511e-07, + "loss": 0.5138, + "step": 16648 + }, + { + "epoch": 0.6870099859701246, + "grad_norm": 10.82364581636334, + "learning_rate": 7.069656376201186e-07, + "loss": 0.5047, + "step": 16649 + }, + { + "epoch": 0.6870512503094826, + "grad_norm": 3.438676746974905, + "learning_rate": 7.067954776123037e-07, + "loss": 0.5657, + "step": 16650 + }, + { + "epoch": 0.6870925146488405, + "grad_norm": 2.4968902285995758, + "learning_rate": 7.066253317731063e-07, + "loss": 0.5234, + "step": 16651 + }, + { + "epoch": 0.6871337789881984, + "grad_norm": 5.363113661755067, + "learning_rate": 7.064552001055646e-07, + "loss": 0.5469, + "step": 16652 + }, + { + "epoch": 0.6871750433275563, + "grad_norm": 2.0699766739991667, + "learning_rate": 7.062850826127182e-07, + "loss": 0.5325, + "step": 16653 + }, + { + "epoch": 0.6872163076669142, + "grad_norm": 4.320498941059592, + "learning_rate": 7.06114979297606e-07, + "loss": 0.5327, + "step": 16654 + }, + { + "epoch": 0.6872575720062721, + "grad_norm": 1.977797971812188, + "learning_rate": 7.059448901632666e-07, + "loss": 0.5046, + "step": 16655 + }, + { + "epoch": 0.6872988363456302, + "grad_norm": 2.5716796382782223, + "learning_rate": 7.05774815212738e-07, + "loss": 0.5722, + "step": 16656 + }, + { + "epoch": 0.6873401006849881, + "grad_norm": 6.958892027664447, + "learning_rate": 7.056047544490575e-07, + "loss": 0.548, + "step": 16657 + }, + { + "epoch": 0.687381365024346, + "grad_norm": 3.19990801173843, + "learning_rate": 
7.054347078752632e-07, + "loss": 0.4553, + "step": 16658 + }, + { + "epoch": 0.6874226293637039, + "grad_norm": 1.881769460922296, + "learning_rate": 7.05264675494393e-07, + "loss": 0.4717, + "step": 16659 + }, + { + "epoch": 0.6874638937030618, + "grad_norm": 1.8006974868562877, + "learning_rate": 7.050946573094842e-07, + "loss": 0.4943, + "step": 16660 + }, + { + "epoch": 0.6875051580424197, + "grad_norm": 28.93898263332733, + "learning_rate": 7.049246533235727e-07, + "loss": 0.5563, + "step": 16661 + }, + { + "epoch": 0.6875464223817777, + "grad_norm": 4.417651349085973, + "learning_rate": 7.047546635396961e-07, + "loss": 0.5406, + "step": 16662 + }, + { + "epoch": 0.6875876867211356, + "grad_norm": 2.13241286103345, + "learning_rate": 7.045846879608905e-07, + "loss": 0.4666, + "step": 16663 + }, + { + "epoch": 0.6876289510604935, + "grad_norm": 2.775095960662998, + "learning_rate": 7.044147265901927e-07, + "loss": 0.5753, + "step": 16664 + }, + { + "epoch": 0.6876702153998514, + "grad_norm": 3.872393417278568, + "learning_rate": 7.042447794306376e-07, + "loss": 0.5144, + "step": 16665 + }, + { + "epoch": 0.6877114797392094, + "grad_norm": 4.028668703182924, + "learning_rate": 7.040748464852615e-07, + "loss": 0.5207, + "step": 16666 + }, + { + "epoch": 0.6877527440785673, + "grad_norm": 2.860798810346824, + "learning_rate": 7.039049277570997e-07, + "loss": 0.507, + "step": 16667 + }, + { + "epoch": 0.6877940084179253, + "grad_norm": 5.104905865538464, + "learning_rate": 7.037350232491879e-07, + "loss": 0.5205, + "step": 16668 + }, + { + "epoch": 0.6878352727572832, + "grad_norm": 2.192444823382992, + "learning_rate": 7.035651329645606e-07, + "loss": 0.5752, + "step": 16669 + }, + { + "epoch": 0.6878765370966411, + "grad_norm": 2.427174119215608, + "learning_rate": 7.033952569062519e-07, + "loss": 0.5544, + "step": 16670 + }, + { + "epoch": 0.687917801435999, + "grad_norm": 3.39932244757389, + "learning_rate": 7.032253950772965e-07, + "loss": 0.4735, + "step": 16671 + }, + { + "epoch": 0.6879590657753569, + "grad_norm": 7.604531707797934, + "learning_rate": 7.030555474807296e-07, + "loss": 0.5494, + "step": 16672 + }, + { + "epoch": 0.6880003301147148, + "grad_norm": 6.918472510396035, + "learning_rate": 7.028857141195836e-07, + "loss": 0.5548, + "step": 16673 + }, + { + "epoch": 0.6880415944540728, + "grad_norm": 3.181498152069659, + "learning_rate": 7.027158949968928e-07, + "loss": 0.5443, + "step": 16674 + }, + { + "epoch": 0.6880828587934307, + "grad_norm": 2.149441734900831, + "learning_rate": 7.025460901156906e-07, + "loss": 0.524, + "step": 16675 + }, + { + "epoch": 0.6881241231327886, + "grad_norm": 6.931443608893888, + "learning_rate": 7.023762994790106e-07, + "loss": 0.5112, + "step": 16676 + }, + { + "epoch": 0.6881653874721466, + "grad_norm": 2.661996057417076, + "learning_rate": 7.022065230898849e-07, + "loss": 0.4707, + "step": 16677 + }, + { + "epoch": 0.6882066518115045, + "grad_norm": 3.833711845781131, + "learning_rate": 7.020367609513465e-07, + "loss": 0.5171, + "step": 16678 + }, + { + "epoch": 0.6882479161508624, + "grad_norm": 2.376468932107412, + "learning_rate": 7.018670130664275e-07, + "loss": 0.524, + "step": 16679 + }, + { + "epoch": 0.6882891804902204, + "grad_norm": 3.122900237784603, + "learning_rate": 7.01697279438161e-07, + "loss": 0.4964, + "step": 16680 + }, + { + "epoch": 0.6883304448295783, + "grad_norm": 4.813523351740768, + "learning_rate": 7.015275600695775e-07, + "loss": 0.4974, + "step": 16681 + }, + { + "epoch": 0.6883717091689362, + "grad_norm": 
2.010837785125301, + "learning_rate": 7.013578549637098e-07, + "loss": 0.5716, + "step": 16682 + }, + { + "epoch": 0.6884129735082941, + "grad_norm": 2.7722724212520182, + "learning_rate": 7.011881641235883e-07, + "loss": 0.5365, + "step": 16683 + }, + { + "epoch": 0.688454237847652, + "grad_norm": 3.5851122074906896, + "learning_rate": 7.010184875522444e-07, + "loss": 0.5766, + "step": 16684 + }, + { + "epoch": 0.68849550218701, + "grad_norm": 2.6782421426804035, + "learning_rate": 7.008488252527096e-07, + "loss": 0.4778, + "step": 16685 + }, + { + "epoch": 0.6885367665263679, + "grad_norm": 2.837723458793877, + "learning_rate": 7.006791772280134e-07, + "loss": 0.5664, + "step": 16686 + }, + { + "epoch": 0.6885780308657259, + "grad_norm": 3.566270522430965, + "learning_rate": 7.005095434811868e-07, + "loss": 0.5147, + "step": 16687 + }, + { + "epoch": 0.6886192952050838, + "grad_norm": 5.619539554755721, + "learning_rate": 7.003399240152601e-07, + "loss": 0.482, + "step": 16688 + }, + { + "epoch": 0.6886605595444417, + "grad_norm": 2.364020381682851, + "learning_rate": 7.001703188332624e-07, + "loss": 0.4992, + "step": 16689 + }, + { + "epoch": 0.6887018238837996, + "grad_norm": 3.1799452671309907, + "learning_rate": 7.000007279382237e-07, + "loss": 0.554, + "step": 16690 + }, + { + "epoch": 0.6887430882231576, + "grad_norm": 3.79539687289845, + "learning_rate": 6.998311513331732e-07, + "loss": 0.4945, + "step": 16691 + }, + { + "epoch": 0.6887843525625155, + "grad_norm": 2.6936074809768025, + "learning_rate": 6.996615890211406e-07, + "loss": 0.5329, + "step": 16692 + }, + { + "epoch": 0.6888256169018734, + "grad_norm": 2.6017980437153585, + "learning_rate": 6.994920410051537e-07, + "loss": 0.5482, + "step": 16693 + }, + { + "epoch": 0.6888668812412313, + "grad_norm": 1.8530772962586795, + "learning_rate": 6.993225072882414e-07, + "loss": 0.4578, + "step": 16694 + }, + { + "epoch": 0.6889081455805892, + "grad_norm": 3.0897941503378967, + "learning_rate": 6.991529878734328e-07, + "loss": 0.5007, + "step": 16695 + }, + { + "epoch": 0.6889494099199471, + "grad_norm": 3.6976625238928222, + "learning_rate": 6.989834827637546e-07, + "loss": 0.5826, + "step": 16696 + }, + { + "epoch": 0.6889906742593052, + "grad_norm": 3.7748705517059737, + "learning_rate": 6.988139919622358e-07, + "loss": 0.4947, + "step": 16697 + }, + { + "epoch": 0.6890319385986631, + "grad_norm": 2.081052060305835, + "learning_rate": 6.986445154719028e-07, + "loss": 0.5056, + "step": 16698 + }, + { + "epoch": 0.689073202938021, + "grad_norm": 2.6545050181306604, + "learning_rate": 6.984750532957835e-07, + "loss": 0.4989, + "step": 16699 + }, + { + "epoch": 0.6891144672773789, + "grad_norm": 3.771235242331204, + "learning_rate": 6.983056054369048e-07, + "loss": 0.5038, + "step": 16700 + }, + { + "epoch": 0.6891557316167368, + "grad_norm": 2.313387439295087, + "learning_rate": 6.981361718982942e-07, + "loss": 0.5154, + "step": 16701 + }, + { + "epoch": 0.6891969959560947, + "grad_norm": 2.6524983018638983, + "learning_rate": 6.979667526829768e-07, + "loss": 0.5348, + "step": 16702 + }, + { + "epoch": 0.6892382602954527, + "grad_norm": 2.2776584053083373, + "learning_rate": 6.977973477939796e-07, + "loss": 0.5459, + "step": 16703 + }, + { + "epoch": 0.6892795246348106, + "grad_norm": 6.806849861539801, + "learning_rate": 6.976279572343286e-07, + "loss": 0.4833, + "step": 16704 + }, + { + "epoch": 0.6893207889741685, + "grad_norm": 3.0756302010694876, + "learning_rate": 6.974585810070501e-07, + "loss": 0.5433, + "step": 16705 + 
}, + { + "epoch": 0.6893620533135264, + "grad_norm": 2.1169550515935964, + "learning_rate": 6.972892191151685e-07, + "loss": 0.4936, + "step": 16706 + }, + { + "epoch": 0.6894033176528844, + "grad_norm": 4.3819041552087965, + "learning_rate": 6.971198715617093e-07, + "loss": 0.5587, + "step": 16707 + }, + { + "epoch": 0.6894445819922423, + "grad_norm": 2.310236907889004, + "learning_rate": 6.969505383496984e-07, + "loss": 0.5135, + "step": 16708 + }, + { + "epoch": 0.6894858463316003, + "grad_norm": 1.9015329971609665, + "learning_rate": 6.967812194821598e-07, + "loss": 0.4512, + "step": 16709 + }, + { + "epoch": 0.6895271106709582, + "grad_norm": 2.6678862234578156, + "learning_rate": 6.966119149621174e-07, + "loss": 0.5383, + "step": 16710 + }, + { + "epoch": 0.6895683750103161, + "grad_norm": 4.298947608449495, + "learning_rate": 6.96442624792596e-07, + "loss": 0.5484, + "step": 16711 + }, + { + "epoch": 0.689609639349674, + "grad_norm": 6.629805247391072, + "learning_rate": 6.962733489766195e-07, + "loss": 0.5258, + "step": 16712 + }, + { + "epoch": 0.6896509036890319, + "grad_norm": 2.620341599359256, + "learning_rate": 6.961040875172122e-07, + "loss": 0.5355, + "step": 16713 + }, + { + "epoch": 0.6896921680283898, + "grad_norm": 2.2931048876970554, + "learning_rate": 6.959348404173963e-07, + "loss": 0.4624, + "step": 16714 + }, + { + "epoch": 0.6897334323677478, + "grad_norm": 2.5866437167213006, + "learning_rate": 6.957656076801956e-07, + "loss": 0.5488, + "step": 16715 + }, + { + "epoch": 0.6897746967071057, + "grad_norm": 2.944541170429247, + "learning_rate": 6.955963893086332e-07, + "loss": 0.5069, + "step": 16716 + }, + { + "epoch": 0.6898159610464637, + "grad_norm": 2.506063350313452, + "learning_rate": 6.954271853057319e-07, + "loss": 0.536, + "step": 16717 + }, + { + "epoch": 0.6898572253858216, + "grad_norm": 2.579881720181599, + "learning_rate": 6.952579956745133e-07, + "loss": 0.5499, + "step": 16718 + }, + { + "epoch": 0.6898984897251795, + "grad_norm": 1.8229117147550722, + "learning_rate": 6.950888204180001e-07, + "loss": 0.4353, + "step": 16719 + }, + { + "epoch": 0.6899397540645374, + "grad_norm": 9.979185488172615, + "learning_rate": 6.949196595392141e-07, + "loss": 0.4976, + "step": 16720 + }, + { + "epoch": 0.6899810184038954, + "grad_norm": 2.4555296938862523, + "learning_rate": 6.947505130411776e-07, + "loss": 0.4641, + "step": 16721 + }, + { + "epoch": 0.6900222827432533, + "grad_norm": 3.2174339809267414, + "learning_rate": 6.945813809269114e-07, + "loss": 0.5227, + "step": 16722 + }, + { + "epoch": 0.6900635470826112, + "grad_norm": 3.2535738831736927, + "learning_rate": 6.944122631994361e-07, + "loss": 0.5236, + "step": 16723 + }, + { + "epoch": 0.6901048114219691, + "grad_norm": 3.084768542022548, + "learning_rate": 6.94243159861773e-07, + "loss": 0.5334, + "step": 16724 + }, + { + "epoch": 0.690146075761327, + "grad_norm": 2.028659104068141, + "learning_rate": 6.940740709169431e-07, + "loss": 0.495, + "step": 16725 + }, + { + "epoch": 0.6901873401006849, + "grad_norm": 2.1405562342018003, + "learning_rate": 6.939049963679661e-07, + "loss": 0.5071, + "step": 16726 + }, + { + "epoch": 0.690228604440043, + "grad_norm": 3.733230918233907, + "learning_rate": 6.937359362178623e-07, + "loss": 0.588, + "step": 16727 + }, + { + "epoch": 0.6902698687794009, + "grad_norm": 5.795748427802153, + "learning_rate": 6.935668904696517e-07, + "loss": 0.5298, + "step": 16728 + }, + { + "epoch": 0.6903111331187588, + "grad_norm": 6.266419431718925, + "learning_rate": 
6.933978591263544e-07, + "loss": 0.5807, + "step": 16729 + }, + { + "epoch": 0.6903523974581167, + "grad_norm": 3.978515230810951, + "learning_rate": 6.932288421909885e-07, + "loss": 0.518, + "step": 16730 + }, + { + "epoch": 0.6903936617974746, + "grad_norm": 3.6363683524320836, + "learning_rate": 6.930598396665738e-07, + "loss": 0.4775, + "step": 16731 + }, + { + "epoch": 0.6904349261368325, + "grad_norm": 3.707234656249791, + "learning_rate": 6.928908515561288e-07, + "loss": 0.4861, + "step": 16732 + }, + { + "epoch": 0.6904761904761905, + "grad_norm": 2.631198154965697, + "learning_rate": 6.927218778626732e-07, + "loss": 0.5084, + "step": 16733 + }, + { + "epoch": 0.6905174548155484, + "grad_norm": 3.194849071866807, + "learning_rate": 6.925529185892241e-07, + "loss": 0.5119, + "step": 16734 + }, + { + "epoch": 0.6905587191549063, + "grad_norm": 6.7471021728636575, + "learning_rate": 6.923839737387992e-07, + "loss": 0.5345, + "step": 16735 + }, + { + "epoch": 0.6905999834942642, + "grad_norm": 11.58917388361877, + "learning_rate": 6.92215043314417e-07, + "loss": 0.5475, + "step": 16736 + }, + { + "epoch": 0.6906412478336221, + "grad_norm": 7.604935575798978, + "learning_rate": 6.920461273190949e-07, + "loss": 0.5591, + "step": 16737 + }, + { + "epoch": 0.6906825121729802, + "grad_norm": 3.530919684004645, + "learning_rate": 6.918772257558508e-07, + "loss": 0.5013, + "step": 16738 + }, + { + "epoch": 0.6907237765123381, + "grad_norm": 2.1213523014693565, + "learning_rate": 6.917083386277003e-07, + "loss": 0.4606, + "step": 16739 + }, + { + "epoch": 0.690765040851696, + "grad_norm": 2.59500230466129, + "learning_rate": 6.91539465937661e-07, + "loss": 0.5531, + "step": 16740 + }, + { + "epoch": 0.6908063051910539, + "grad_norm": 3.038912011054571, + "learning_rate": 6.913706076887492e-07, + "loss": 0.4736, + "step": 16741 + }, + { + "epoch": 0.6908475695304118, + "grad_norm": 3.612538230258736, + "learning_rate": 6.912017638839819e-07, + "loss": 0.5167, + "step": 16742 + }, + { + "epoch": 0.6908888338697697, + "grad_norm": 3.450549560942588, + "learning_rate": 6.910329345263738e-07, + "loss": 0.5731, + "step": 16743 + }, + { + "epoch": 0.6909300982091277, + "grad_norm": 2.592750605146295, + "learning_rate": 6.908641196189413e-07, + "loss": 0.5384, + "step": 16744 + }, + { + "epoch": 0.6909713625484856, + "grad_norm": 3.5159473779734927, + "learning_rate": 6.906953191646997e-07, + "loss": 0.5269, + "step": 16745 + }, + { + "epoch": 0.6910126268878435, + "grad_norm": 5.501021070971378, + "learning_rate": 6.905265331666648e-07, + "loss": 0.4871, + "step": 16746 + }, + { + "epoch": 0.6910538912272014, + "grad_norm": 3.346732195101071, + "learning_rate": 6.903577616278505e-07, + "loss": 0.4779, + "step": 16747 + }, + { + "epoch": 0.6910951555665594, + "grad_norm": 1.731194979475438, + "learning_rate": 6.901890045512725e-07, + "loss": 0.4955, + "step": 16748 + }, + { + "epoch": 0.6911364199059173, + "grad_norm": 5.7368197670079955, + "learning_rate": 6.900202619399442e-07, + "loss": 0.4756, + "step": 16749 + }, + { + "epoch": 0.6911776842452753, + "grad_norm": 3.976731751429204, + "learning_rate": 6.898515337968809e-07, + "loss": 0.5482, + "step": 16750 + }, + { + "epoch": 0.6912189485846332, + "grad_norm": 3.080702676390417, + "learning_rate": 6.896828201250951e-07, + "loss": 0.4583, + "step": 16751 + }, + { + "epoch": 0.6912602129239911, + "grad_norm": 2.092944923322132, + "learning_rate": 6.895141209276016e-07, + "loss": 0.5337, + "step": 16752 + }, + { + "epoch": 0.691301477263349, + 
"grad_norm": 3.5199100729795165, + "learning_rate": 6.893454362074131e-07, + "loss": 0.4913, + "step": 16753 + }, + { + "epoch": 0.6913427416027069, + "grad_norm": 4.053794262165551, + "learning_rate": 6.891767659675437e-07, + "loss": 0.4754, + "step": 16754 + }, + { + "epoch": 0.6913840059420648, + "grad_norm": 6.208551960432722, + "learning_rate": 6.890081102110052e-07, + "loss": 0.5823, + "step": 16755 + }, + { + "epoch": 0.6914252702814228, + "grad_norm": 5.358866770530076, + "learning_rate": 6.888394689408106e-07, + "loss": 0.5324, + "step": 16756 + }, + { + "epoch": 0.6914665346207807, + "grad_norm": 3.7457342912794918, + "learning_rate": 6.886708421599722e-07, + "loss": 0.5309, + "step": 16757 + }, + { + "epoch": 0.6915077989601387, + "grad_norm": 1.875466217555173, + "learning_rate": 6.885022298715028e-07, + "loss": 0.5453, + "step": 16758 + }, + { + "epoch": 0.6915490632994966, + "grad_norm": 3.003939515422972, + "learning_rate": 6.883336320784131e-07, + "loss": 0.4949, + "step": 16759 + }, + { + "epoch": 0.6915903276388545, + "grad_norm": 2.3193377551054004, + "learning_rate": 6.881650487837152e-07, + "loss": 0.5431, + "step": 16760 + }, + { + "epoch": 0.6916315919782124, + "grad_norm": 3.8283582978992645, + "learning_rate": 6.879964799904209e-07, + "loss": 0.4769, + "step": 16761 + }, + { + "epoch": 0.6916728563175704, + "grad_norm": 1.9055753719170934, + "learning_rate": 6.878279257015402e-07, + "loss": 0.4741, + "step": 16762 + }, + { + "epoch": 0.6917141206569283, + "grad_norm": 2.457026635775133, + "learning_rate": 6.876593859200852e-07, + "loss": 0.5321, + "step": 16763 + }, + { + "epoch": 0.6917553849962862, + "grad_norm": 2.6708551762182178, + "learning_rate": 6.874908606490652e-07, + "loss": 0.5314, + "step": 16764 + }, + { + "epoch": 0.6917966493356441, + "grad_norm": 2.9935919152826056, + "learning_rate": 6.87322349891491e-07, + "loss": 0.485, + "step": 16765 + }, + { + "epoch": 0.691837913675002, + "grad_norm": 2.439200475533222, + "learning_rate": 6.871538536503731e-07, + "loss": 0.5146, + "step": 16766 + }, + { + "epoch": 0.6918791780143599, + "grad_norm": 5.168226013049029, + "learning_rate": 6.869853719287202e-07, + "loss": 0.5361, + "step": 16767 + }, + { + "epoch": 0.691920442353718, + "grad_norm": 3.1120463894628214, + "learning_rate": 6.868169047295425e-07, + "loss": 0.5737, + "step": 16768 + }, + { + "epoch": 0.6919617066930759, + "grad_norm": 5.161987359697684, + "learning_rate": 6.866484520558491e-07, + "loss": 0.4742, + "step": 16769 + }, + { + "epoch": 0.6920029710324338, + "grad_norm": 5.705493569183799, + "learning_rate": 6.864800139106497e-07, + "loss": 0.5698, + "step": 16770 + }, + { + "epoch": 0.6920442353717917, + "grad_norm": 2.6592383694880315, + "learning_rate": 6.863115902969518e-07, + "loss": 0.4779, + "step": 16771 + }, + { + "epoch": 0.6920854997111496, + "grad_norm": 3.2146495571269154, + "learning_rate": 6.861431812177643e-07, + "loss": 0.5887, + "step": 16772 + }, + { + "epoch": 0.6921267640505075, + "grad_norm": 2.2104051895968744, + "learning_rate": 6.859747866760955e-07, + "loss": 0.5171, + "step": 16773 + }, + { + "epoch": 0.6921680283898655, + "grad_norm": 3.3303834738211666, + "learning_rate": 6.858064066749542e-07, + "loss": 0.5071, + "step": 16774 + }, + { + "epoch": 0.6922092927292234, + "grad_norm": 2.689248792321523, + "learning_rate": 6.856380412173472e-07, + "loss": 0.6005, + "step": 16775 + }, + { + "epoch": 0.6922505570685813, + "grad_norm": 9.55363655537272, + "learning_rate": 6.854696903062813e-07, + "loss": 0.4349, + 
"step": 16776 + }, + { + "epoch": 0.6922918214079392, + "grad_norm": 2.672335679038817, + "learning_rate": 6.853013539447644e-07, + "loss": 0.5445, + "step": 16777 + }, + { + "epoch": 0.6923330857472972, + "grad_norm": 3.3551501364634575, + "learning_rate": 6.851330321358035e-07, + "loss": 0.5412, + "step": 16778 + }, + { + "epoch": 0.6923743500866552, + "grad_norm": 3.353667832607497, + "learning_rate": 6.849647248824056e-07, + "loss": 0.5734, + "step": 16779 + }, + { + "epoch": 0.6924156144260131, + "grad_norm": 2.4500853907934887, + "learning_rate": 6.847964321875762e-07, + "loss": 0.562, + "step": 16780 + }, + { + "epoch": 0.692456878765371, + "grad_norm": 3.2646269088372937, + "learning_rate": 6.846281540543217e-07, + "loss": 0.4874, + "step": 16781 + }, + { + "epoch": 0.6924981431047289, + "grad_norm": 6.453992048526841, + "learning_rate": 6.844598904856482e-07, + "loss": 0.5703, + "step": 16782 + }, + { + "epoch": 0.6925394074440868, + "grad_norm": 5.659601068801148, + "learning_rate": 6.842916414845618e-07, + "loss": 0.5313, + "step": 16783 + }, + { + "epoch": 0.6925806717834447, + "grad_norm": 1.9907769229988046, + "learning_rate": 6.841234070540666e-07, + "loss": 0.4858, + "step": 16784 + }, + { + "epoch": 0.6926219361228027, + "grad_norm": 2.2239894040175128, + "learning_rate": 6.839551871971684e-07, + "loss": 0.5171, + "step": 16785 + }, + { + "epoch": 0.6926632004621606, + "grad_norm": 2.373197584259899, + "learning_rate": 6.837869819168725e-07, + "loss": 0.5283, + "step": 16786 + }, + { + "epoch": 0.6927044648015185, + "grad_norm": 2.7305392925548175, + "learning_rate": 6.836187912161831e-07, + "loss": 0.5089, + "step": 16787 + }, + { + "epoch": 0.6927457291408765, + "grad_norm": 3.0860897805489715, + "learning_rate": 6.834506150981038e-07, + "loss": 0.5609, + "step": 16788 + }, + { + "epoch": 0.6927869934802344, + "grad_norm": 4.030743222428565, + "learning_rate": 6.83282453565639e-07, + "loss": 0.5377, + "step": 16789 + }, + { + "epoch": 0.6928282578195923, + "grad_norm": 2.3069984096659244, + "learning_rate": 6.83114306621793e-07, + "loss": 0.5129, + "step": 16790 + }, + { + "epoch": 0.6928695221589503, + "grad_norm": 2.347377207233148, + "learning_rate": 6.829461742695692e-07, + "loss": 0.4968, + "step": 16791 + }, + { + "epoch": 0.6929107864983082, + "grad_norm": 4.514571091945217, + "learning_rate": 6.827780565119705e-07, + "loss": 0.4991, + "step": 16792 + }, + { + "epoch": 0.6929520508376661, + "grad_norm": 4.102313254261651, + "learning_rate": 6.826099533519999e-07, + "loss": 0.5228, + "step": 16793 + }, + { + "epoch": 0.692993315177024, + "grad_norm": 3.774550058884795, + "learning_rate": 6.824418647926604e-07, + "loss": 0.5578, + "step": 16794 + }, + { + "epoch": 0.6930345795163819, + "grad_norm": 1.9819307523242982, + "learning_rate": 6.822737908369548e-07, + "loss": 0.5737, + "step": 16795 + }, + { + "epoch": 0.6930758438557398, + "grad_norm": 2.316992749209064, + "learning_rate": 6.821057314878844e-07, + "loss": 0.494, + "step": 16796 + }, + { + "epoch": 0.6931171081950978, + "grad_norm": 1.6861071789737276, + "learning_rate": 6.81937686748452e-07, + "loss": 0.5243, + "step": 16797 + }, + { + "epoch": 0.6931583725344557, + "grad_norm": 2.4310406935828044, + "learning_rate": 6.817696566216588e-07, + "loss": 0.5105, + "step": 16798 + }, + { + "epoch": 0.6931996368738137, + "grad_norm": 13.964074831179198, + "learning_rate": 6.816016411105069e-07, + "loss": 0.5081, + "step": 16799 + }, + { + "epoch": 0.6932409012131716, + "grad_norm": 6.2117722107853135, + 
"learning_rate": 6.81433640217997e-07, + "loss": 0.5179, + "step": 16800 + }, + { + "epoch": 0.6932821655525295, + "grad_norm": 2.9167965807065963, + "learning_rate": 6.812656539471297e-07, + "loss": 0.5183, + "step": 16801 + }, + { + "epoch": 0.6933234298918874, + "grad_norm": 2.4084078655623844, + "learning_rate": 6.810976823009056e-07, + "loss": 0.4932, + "step": 16802 + }, + { + "epoch": 0.6933646942312454, + "grad_norm": 2.0501334907440936, + "learning_rate": 6.809297252823264e-07, + "loss": 0.5475, + "step": 16803 + }, + { + "epoch": 0.6934059585706033, + "grad_norm": 1.8724093954679863, + "learning_rate": 6.807617828943905e-07, + "loss": 0.4632, + "step": 16804 + }, + { + "epoch": 0.6934472229099612, + "grad_norm": 3.3341392663501397, + "learning_rate": 6.805938551400988e-07, + "loss": 0.5216, + "step": 16805 + }, + { + "epoch": 0.6934884872493191, + "grad_norm": 4.923929458313095, + "learning_rate": 6.804259420224506e-07, + "loss": 0.5446, + "step": 16806 + }, + { + "epoch": 0.693529751588677, + "grad_norm": 2.3845479313473916, + "learning_rate": 6.802580435444459e-07, + "loss": 0.5385, + "step": 16807 + }, + { + "epoch": 0.6935710159280349, + "grad_norm": 4.648706256501679, + "learning_rate": 6.800901597090827e-07, + "loss": 0.4644, + "step": 16808 + }, + { + "epoch": 0.693612280267393, + "grad_norm": 2.065081964960343, + "learning_rate": 6.799222905193601e-07, + "loss": 0.467, + "step": 16809 + }, + { + "epoch": 0.6936535446067509, + "grad_norm": 7.248783018977551, + "learning_rate": 6.797544359782771e-07, + "loss": 0.5222, + "step": 16810 + }, + { + "epoch": 0.6936948089461088, + "grad_norm": 3.4971373404769452, + "learning_rate": 6.795865960888325e-07, + "loss": 0.5034, + "step": 16811 + }, + { + "epoch": 0.6937360732854667, + "grad_norm": 23.746355364934367, + "learning_rate": 6.794187708540228e-07, + "loss": 0.5082, + "step": 16812 + }, + { + "epoch": 0.6937773376248246, + "grad_norm": 4.088571716636043, + "learning_rate": 6.792509602768473e-07, + "loss": 0.4884, + "step": 16813 + }, + { + "epoch": 0.6938186019641825, + "grad_norm": 2.2429955219666757, + "learning_rate": 6.790831643603025e-07, + "loss": 0.4982, + "step": 16814 + }, + { + "epoch": 0.6938598663035405, + "grad_norm": 2.9071961811214484, + "learning_rate": 6.789153831073858e-07, + "loss": 0.4691, + "step": 16815 + }, + { + "epoch": 0.6939011306428984, + "grad_norm": 3.0044788002209772, + "learning_rate": 6.78747616521095e-07, + "loss": 0.5208, + "step": 16816 + }, + { + "epoch": 0.6939423949822563, + "grad_norm": 3.304972267644452, + "learning_rate": 6.785798646044256e-07, + "loss": 0.4666, + "step": 16817 + }, + { + "epoch": 0.6939836593216142, + "grad_norm": 6.1259181594666465, + "learning_rate": 6.784121273603748e-07, + "loss": 0.5226, + "step": 16818 + }, + { + "epoch": 0.6940249236609722, + "grad_norm": 3.255155402402394, + "learning_rate": 6.782444047919387e-07, + "loss": 0.5208, + "step": 16819 + }, + { + "epoch": 0.6940661880003302, + "grad_norm": 3.5368479564316093, + "learning_rate": 6.780766969021138e-07, + "loss": 0.4824, + "step": 16820 + }, + { + "epoch": 0.6941074523396881, + "grad_norm": 2.2742701677701462, + "learning_rate": 6.779090036938946e-07, + "loss": 0.508, + "step": 16821 + }, + { + "epoch": 0.694148716679046, + "grad_norm": 2.3768785159125003, + "learning_rate": 6.777413251702771e-07, + "loss": 0.4737, + "step": 16822 + }, + { + "epoch": 0.6941899810184039, + "grad_norm": 6.995854803555306, + "learning_rate": 6.775736613342567e-07, + "loss": 0.4434, + "step": 16823 + }, + { + "epoch": 
0.6942312453577618, + "grad_norm": 2.5691235550683973, + "learning_rate": 6.774060121888285e-07, + "loss": 0.5296, + "step": 16824 + }, + { + "epoch": 0.6942725096971197, + "grad_norm": 2.766634898806493, + "learning_rate": 6.772383777369863e-07, + "loss": 0.5361, + "step": 16825 + }, + { + "epoch": 0.6943137740364776, + "grad_norm": 1.9619174999050626, + "learning_rate": 6.770707579817254e-07, + "loss": 0.5157, + "step": 16826 + }, + { + "epoch": 0.6943550383758356, + "grad_norm": 4.943969267531921, + "learning_rate": 6.769031529260388e-07, + "loss": 0.4917, + "step": 16827 + }, + { + "epoch": 0.6943963027151935, + "grad_norm": 3.5107172165199403, + "learning_rate": 6.767355625729216e-07, + "loss": 0.5535, + "step": 16828 + }, + { + "epoch": 0.6944375670545515, + "grad_norm": 2.375883031301097, + "learning_rate": 6.765679869253663e-07, + "loss": 0.5248, + "step": 16829 + }, + { + "epoch": 0.6944788313939094, + "grad_norm": 2.412422506862955, + "learning_rate": 6.764004259863664e-07, + "loss": 0.54, + "step": 16830 + }, + { + "epoch": 0.6945200957332673, + "grad_norm": 2.9143813933060563, + "learning_rate": 6.762328797589154e-07, + "loss": 0.5247, + "step": 16831 + }, + { + "epoch": 0.6945613600726253, + "grad_norm": 3.4731063966981583, + "learning_rate": 6.760653482460065e-07, + "loss": 0.538, + "step": 16832 + }, + { + "epoch": 0.6946026244119832, + "grad_norm": 5.053488815718215, + "learning_rate": 6.75897831450631e-07, + "loss": 0.5332, + "step": 16833 + }, + { + "epoch": 0.6946438887513411, + "grad_norm": 4.219079754139014, + "learning_rate": 6.757303293757819e-07, + "loss": 0.5396, + "step": 16834 + }, + { + "epoch": 0.694685153090699, + "grad_norm": 6.054262918287808, + "learning_rate": 6.75562842024451e-07, + "loss": 0.5339, + "step": 16835 + }, + { + "epoch": 0.6947264174300569, + "grad_norm": 5.114945058427142, + "learning_rate": 6.753953693996307e-07, + "loss": 0.5089, + "step": 16836 + }, + { + "epoch": 0.6947676817694148, + "grad_norm": 2.8175795721440946, + "learning_rate": 6.752279115043116e-07, + "loss": 0.5478, + "step": 16837 + }, + { + "epoch": 0.6948089461087728, + "grad_norm": 4.141849777972236, + "learning_rate": 6.750604683414851e-07, + "loss": 0.5305, + "step": 16838 + }, + { + "epoch": 0.6948502104481308, + "grad_norm": 25.511055702873445, + "learning_rate": 6.748930399141425e-07, + "loss": 0.5218, + "step": 16839 + }, + { + "epoch": 0.6948914747874887, + "grad_norm": 3.3001278771403597, + "learning_rate": 6.747256262252746e-07, + "loss": 0.5014, + "step": 16840 + }, + { + "epoch": 0.6949327391268466, + "grad_norm": 3.4357690501798386, + "learning_rate": 6.745582272778719e-07, + "loss": 0.4875, + "step": 16841 + }, + { + "epoch": 0.6949740034662045, + "grad_norm": 3.1560677945868587, + "learning_rate": 6.743908430749233e-07, + "loss": 0.541, + "step": 16842 + }, + { + "epoch": 0.6950152678055624, + "grad_norm": 5.69048454955016, + "learning_rate": 6.742234736194196e-07, + "loss": 0.5771, + "step": 16843 + }, + { + "epoch": 0.6950565321449204, + "grad_norm": 4.443125502703955, + "learning_rate": 6.740561189143511e-07, + "loss": 0.5403, + "step": 16844 + }, + { + "epoch": 0.6950977964842783, + "grad_norm": 2.325263670688724, + "learning_rate": 6.738887789627061e-07, + "loss": 0.4405, + "step": 16845 + }, + { + "epoch": 0.6951390608236362, + "grad_norm": 2.2216471023871667, + "learning_rate": 6.73721453767474e-07, + "loss": 0.5018, + "step": 16846 + }, + { + "epoch": 0.6951803251629941, + "grad_norm": 2.732563906550802, + "learning_rate": 6.735541433316438e-07, + 
"loss": 0.5482, + "step": 16847 + }, + { + "epoch": 0.695221589502352, + "grad_norm": 2.6219530357789265, + "learning_rate": 6.733868476582044e-07, + "loss": 0.4815, + "step": 16848 + }, + { + "epoch": 0.69526285384171, + "grad_norm": 7.8954508841489766, + "learning_rate": 6.732195667501435e-07, + "loss": 0.5143, + "step": 16849 + }, + { + "epoch": 0.695304118181068, + "grad_norm": 8.128530553016356, + "learning_rate": 6.730523006104492e-07, + "loss": 0.575, + "step": 16850 + }, + { + "epoch": 0.6953453825204259, + "grad_norm": 5.866676112143087, + "learning_rate": 6.728850492421097e-07, + "loss": 0.5675, + "step": 16851 + }, + { + "epoch": 0.6953866468597838, + "grad_norm": 3.8750352798494676, + "learning_rate": 6.727178126481127e-07, + "loss": 0.5216, + "step": 16852 + }, + { + "epoch": 0.6954279111991417, + "grad_norm": 1.9600960334482311, + "learning_rate": 6.725505908314454e-07, + "loss": 0.5023, + "step": 16853 + }, + { + "epoch": 0.6954691755384996, + "grad_norm": 2.604093344186368, + "learning_rate": 6.723833837950937e-07, + "loss": 0.5149, + "step": 16854 + }, + { + "epoch": 0.6955104398778575, + "grad_norm": 6.473158182208321, + "learning_rate": 6.722161915420453e-07, + "loss": 0.5223, + "step": 16855 + }, + { + "epoch": 0.6955517042172155, + "grad_norm": 4.5982557897941385, + "learning_rate": 6.720490140752862e-07, + "loss": 0.5322, + "step": 16856 + }, + { + "epoch": 0.6955929685565734, + "grad_norm": 2.203303201456149, + "learning_rate": 6.718818513978039e-07, + "loss": 0.5369, + "step": 16857 + }, + { + "epoch": 0.6956342328959313, + "grad_norm": 2.8797665174270364, + "learning_rate": 6.717147035125827e-07, + "loss": 0.5261, + "step": 16858 + }, + { + "epoch": 0.6956754972352892, + "grad_norm": 2.609841537380797, + "learning_rate": 6.715475704226087e-07, + "loss": 0.4713, + "step": 16859 + }, + { + "epoch": 0.6957167615746472, + "grad_norm": 2.995920666344614, + "learning_rate": 6.713804521308678e-07, + "loss": 0.5137, + "step": 16860 + }, + { + "epoch": 0.6957580259140052, + "grad_norm": 3.9142018673274244, + "learning_rate": 6.712133486403454e-07, + "loss": 0.4707, + "step": 16861 + }, + { + "epoch": 0.6957992902533631, + "grad_norm": 3.5309889850129172, + "learning_rate": 6.710462599540255e-07, + "loss": 0.4718, + "step": 16862 + }, + { + "epoch": 0.695840554592721, + "grad_norm": 4.4105764384510175, + "learning_rate": 6.708791860748927e-07, + "loss": 0.5964, + "step": 16863 + }, + { + "epoch": 0.6958818189320789, + "grad_norm": 2.483475403887237, + "learning_rate": 6.707121270059327e-07, + "loss": 0.5065, + "step": 16864 + }, + { + "epoch": 0.6959230832714368, + "grad_norm": 2.146420543240821, + "learning_rate": 6.705450827501282e-07, + "loss": 0.4531, + "step": 16865 + }, + { + "epoch": 0.6959643476107947, + "grad_norm": 2.7383400283344024, + "learning_rate": 6.703780533104637e-07, + "loss": 0.5692, + "step": 16866 + }, + { + "epoch": 0.6960056119501526, + "grad_norm": 2.599381187547641, + "learning_rate": 6.702110386899223e-07, + "loss": 0.4959, + "step": 16867 + }, + { + "epoch": 0.6960468762895106, + "grad_norm": 3.3418674019350925, + "learning_rate": 6.700440388914875e-07, + "loss": 0.5094, + "step": 16868 + }, + { + "epoch": 0.6960881406288685, + "grad_norm": 2.5159400635052944, + "learning_rate": 6.698770539181429e-07, + "loss": 0.5131, + "step": 16869 + }, + { + "epoch": 0.6961294049682265, + "grad_norm": 3.7221659240450258, + "learning_rate": 6.697100837728702e-07, + "loss": 0.5138, + "step": 16870 + }, + { + "epoch": 0.6961706693075844, + "grad_norm": 
3.478408850637796, + "learning_rate": 6.695431284586524e-07, + "loss": 0.5049, + "step": 16871 + }, + { + "epoch": 0.6962119336469423, + "grad_norm": 2.1636487754753433, + "learning_rate": 6.69376187978472e-07, + "loss": 0.5508, + "step": 16872 + }, + { + "epoch": 0.6962531979863003, + "grad_norm": 5.090035238578666, + "learning_rate": 6.692092623353114e-07, + "loss": 0.5402, + "step": 16873 + }, + { + "epoch": 0.6962944623256582, + "grad_norm": 1.9277074115536337, + "learning_rate": 6.690423515321509e-07, + "loss": 0.5289, + "step": 16874 + }, + { + "epoch": 0.6963357266650161, + "grad_norm": 5.9443287763741255, + "learning_rate": 6.688754555719727e-07, + "loss": 0.4932, + "step": 16875 + }, + { + "epoch": 0.696376991004374, + "grad_norm": 2.4253912229087087, + "learning_rate": 6.687085744577581e-07, + "loss": 0.5015, + "step": 16876 + }, + { + "epoch": 0.6964182553437319, + "grad_norm": 2.7816392159555625, + "learning_rate": 6.685417081924887e-07, + "loss": 0.4923, + "step": 16877 + }, + { + "epoch": 0.6964595196830898, + "grad_norm": 5.547411676441004, + "learning_rate": 6.683748567791437e-07, + "loss": 0.5453, + "step": 16878 + }, + { + "epoch": 0.6965007840224477, + "grad_norm": 7.854445222802841, + "learning_rate": 6.682080202207046e-07, + "loss": 0.5154, + "step": 16879 + }, + { + "epoch": 0.6965420483618058, + "grad_norm": 2.397472990292567, + "learning_rate": 6.680411985201506e-07, + "loss": 0.5024, + "step": 16880 + }, + { + "epoch": 0.6965833127011637, + "grad_norm": 2.1625303020718833, + "learning_rate": 6.678743916804627e-07, + "loss": 0.5009, + "step": 16881 + }, + { + "epoch": 0.6966245770405216, + "grad_norm": 4.046457859947951, + "learning_rate": 6.67707599704619e-07, + "loss": 0.5303, + "step": 16882 + }, + { + "epoch": 0.6966658413798795, + "grad_norm": 2.8249656759189707, + "learning_rate": 6.675408225955998e-07, + "loss": 0.5449, + "step": 16883 + }, + { + "epoch": 0.6967071057192374, + "grad_norm": 3.148352787445366, + "learning_rate": 6.67374060356384e-07, + "loss": 0.5032, + "step": 16884 + }, + { + "epoch": 0.6967483700585954, + "grad_norm": 2.8424627940041356, + "learning_rate": 6.672073129899511e-07, + "loss": 0.4633, + "step": 16885 + }, + { + "epoch": 0.6967896343979533, + "grad_norm": 5.309038132349648, + "learning_rate": 6.670405804992782e-07, + "loss": 0.5246, + "step": 16886 + }, + { + "epoch": 0.6968308987373112, + "grad_norm": 4.080457111176478, + "learning_rate": 6.668738628873443e-07, + "loss": 0.5021, + "step": 16887 + }, + { + "epoch": 0.6968721630766691, + "grad_norm": 3.8308245957059834, + "learning_rate": 6.667071601571274e-07, + "loss": 0.5063, + "step": 16888 + }, + { + "epoch": 0.696913427416027, + "grad_norm": 3.1782001987495114, + "learning_rate": 6.665404723116057e-07, + "loss": 0.5532, + "step": 16889 + }, + { + "epoch": 0.696954691755385, + "grad_norm": 3.4296432351674864, + "learning_rate": 6.663737993537556e-07, + "loss": 0.5659, + "step": 16890 + }, + { + "epoch": 0.696995956094743, + "grad_norm": 3.1475069999661542, + "learning_rate": 6.662071412865549e-07, + "loss": 0.5375, + "step": 16891 + }, + { + "epoch": 0.6970372204341009, + "grad_norm": 2.9332662220960977, + "learning_rate": 6.660404981129811e-07, + "loss": 0.5663, + "step": 16892 + }, + { + "epoch": 0.6970784847734588, + "grad_norm": 2.398507844331325, + "learning_rate": 6.658738698360096e-07, + "loss": 0.5304, + "step": 16893 + }, + { + "epoch": 0.6971197491128167, + "grad_norm": 6.616029654906152, + "learning_rate": 6.657072564586179e-07, + "loss": 0.4989, + "step": 16894 + 
}, + { + "epoch": 0.6971610134521746, + "grad_norm": 4.11914785272055, + "learning_rate": 6.655406579837814e-07, + "loss": 0.5133, + "step": 16895 + }, + { + "epoch": 0.6972022777915325, + "grad_norm": 10.875373771162598, + "learning_rate": 6.653740744144761e-07, + "loss": 0.5206, + "step": 16896 + }, + { + "epoch": 0.6972435421308905, + "grad_norm": 2.4828044783599905, + "learning_rate": 6.652075057536778e-07, + "loss": 0.5481, + "step": 16897 + }, + { + "epoch": 0.6972848064702484, + "grad_norm": 3.0362008057860796, + "learning_rate": 6.650409520043621e-07, + "loss": 0.5468, + "step": 16898 + }, + { + "epoch": 0.6973260708096063, + "grad_norm": 5.626157586783149, + "learning_rate": 6.648744131695035e-07, + "loss": 0.5162, + "step": 16899 + }, + { + "epoch": 0.6973673351489643, + "grad_norm": 4.301438379452265, + "learning_rate": 6.647078892520769e-07, + "loss": 0.539, + "step": 16900 + }, + { + "epoch": 0.6974085994883222, + "grad_norm": 2.7693768998122343, + "learning_rate": 6.64541380255057e-07, + "loss": 0.4759, + "step": 16901 + }, + { + "epoch": 0.6974498638276801, + "grad_norm": 2.8633703633146217, + "learning_rate": 6.643748861814186e-07, + "loss": 0.5351, + "step": 16902 + }, + { + "epoch": 0.6974911281670381, + "grad_norm": 2.7465186479864814, + "learning_rate": 6.642084070341346e-07, + "loss": 0.5936, + "step": 16903 + }, + { + "epoch": 0.697532392506396, + "grad_norm": 2.898841519526196, + "learning_rate": 6.640419428161794e-07, + "loss": 0.5102, + "step": 16904 + }, + { + "epoch": 0.6975736568457539, + "grad_norm": 3.3819826166753972, + "learning_rate": 6.638754935305266e-07, + "loss": 0.5403, + "step": 16905 + }, + { + "epoch": 0.6976149211851118, + "grad_norm": 1.973848138814799, + "learning_rate": 6.637090591801494e-07, + "loss": 0.4798, + "step": 16906 + }, + { + "epoch": 0.6976561855244697, + "grad_norm": 2.7426033745095135, + "learning_rate": 6.635426397680198e-07, + "loss": 0.5126, + "step": 16907 + }, + { + "epoch": 0.6976974498638276, + "grad_norm": 2.489989113082427, + "learning_rate": 6.633762352971112e-07, + "loss": 0.5293, + "step": 16908 + }, + { + "epoch": 0.6977387142031856, + "grad_norm": 1.9308719916527868, + "learning_rate": 6.63209845770396e-07, + "loss": 0.5059, + "step": 16909 + }, + { + "epoch": 0.6977799785425436, + "grad_norm": 3.7467099135050903, + "learning_rate": 6.630434711908468e-07, + "loss": 0.5494, + "step": 16910 + }, + { + "epoch": 0.6978212428819015, + "grad_norm": 2.8287364204634784, + "learning_rate": 6.628771115614343e-07, + "loss": 0.4889, + "step": 16911 + }, + { + "epoch": 0.6978625072212594, + "grad_norm": 2.66749485803093, + "learning_rate": 6.627107668851309e-07, + "loss": 0.484, + "step": 16912 + }, + { + "epoch": 0.6979037715606173, + "grad_norm": 2.151047686240106, + "learning_rate": 6.625444371649077e-07, + "loss": 0.5646, + "step": 16913 + }, + { + "epoch": 0.6979450358999753, + "grad_norm": 3.092040050348223, + "learning_rate": 6.623781224037364e-07, + "loss": 0.488, + "step": 16914 + }, + { + "epoch": 0.6979863002393332, + "grad_norm": 12.458599580193894, + "learning_rate": 6.622118226045867e-07, + "loss": 0.5729, + "step": 16915 + }, + { + "epoch": 0.6980275645786911, + "grad_norm": 3.977591619630439, + "learning_rate": 6.620455377704297e-07, + "loss": 0.4905, + "step": 16916 + }, + { + "epoch": 0.698068828918049, + "grad_norm": 2.143952788582119, + "learning_rate": 6.618792679042356e-07, + "loss": 0.4925, + "step": 16917 + }, + { + "epoch": 0.6981100932574069, + "grad_norm": 6.33965726930057, + "learning_rate": 
6.61713013008975e-07, + "loss": 0.5813, + "step": 16918 + }, + { + "epoch": 0.6981513575967648, + "grad_norm": 2.2643183318447146, + "learning_rate": 6.61546773087617e-07, + "loss": 0.511, + "step": 16919 + }, + { + "epoch": 0.6981926219361227, + "grad_norm": 2.5511523219617245, + "learning_rate": 6.613805481431307e-07, + "loss": 0.5453, + "step": 16920 + }, + { + "epoch": 0.6982338862754808, + "grad_norm": 1.5551110716381593, + "learning_rate": 6.612143381784857e-07, + "loss": 0.4795, + "step": 16921 + }, + { + "epoch": 0.6982751506148387, + "grad_norm": 7.898925236169057, + "learning_rate": 6.610481431966512e-07, + "loss": 0.5197, + "step": 16922 + }, + { + "epoch": 0.6983164149541966, + "grad_norm": 2.1753377448029636, + "learning_rate": 6.608819632005955e-07, + "loss": 0.5778, + "step": 16923 + }, + { + "epoch": 0.6983576792935545, + "grad_norm": 14.015463148225793, + "learning_rate": 6.607157981932868e-07, + "loss": 0.5042, + "step": 16924 + }, + { + "epoch": 0.6983989436329124, + "grad_norm": 3.0946781094964018, + "learning_rate": 6.605496481776936e-07, + "loss": 0.5438, + "step": 16925 + }, + { + "epoch": 0.6984402079722704, + "grad_norm": 2.4092001589379617, + "learning_rate": 6.603835131567841e-07, + "loss": 0.4808, + "step": 16926 + }, + { + "epoch": 0.6984814723116283, + "grad_norm": 2.392032339542253, + "learning_rate": 6.60217393133525e-07, + "loss": 0.4955, + "step": 16927 + }, + { + "epoch": 0.6985227366509862, + "grad_norm": 2.3178791348827263, + "learning_rate": 6.600512881108841e-07, + "loss": 0.5393, + "step": 16928 + }, + { + "epoch": 0.6985640009903441, + "grad_norm": 3.018498369998778, + "learning_rate": 6.598851980918284e-07, + "loss": 0.5312, + "step": 16929 + }, + { + "epoch": 0.698605265329702, + "grad_norm": 2.9889566925204187, + "learning_rate": 6.597191230793252e-07, + "loss": 0.5234, + "step": 16930 + }, + { + "epoch": 0.69864652966906, + "grad_norm": 3.0551095302971256, + "learning_rate": 6.595530630763401e-07, + "loss": 0.502, + "step": 16931 + }, + { + "epoch": 0.698687794008418, + "grad_norm": 4.152748889516545, + "learning_rate": 6.593870180858401e-07, + "loss": 0.5797, + "step": 16932 + }, + { + "epoch": 0.6987290583477759, + "grad_norm": 3.6486518138447366, + "learning_rate": 6.592209881107905e-07, + "loss": 0.4827, + "step": 16933 + }, + { + "epoch": 0.6987703226871338, + "grad_norm": 4.267892117759158, + "learning_rate": 6.590549731541574e-07, + "loss": 0.4338, + "step": 16934 + }, + { + "epoch": 0.6988115870264917, + "grad_norm": 2.755861472725285, + "learning_rate": 6.588889732189066e-07, + "loss": 0.5352, + "step": 16935 + }, + { + "epoch": 0.6988528513658496, + "grad_norm": 2.4003284799246947, + "learning_rate": 6.587229883080025e-07, + "loss": 0.479, + "step": 16936 + }, + { + "epoch": 0.6988941157052075, + "grad_norm": 2.391327815138351, + "learning_rate": 6.585570184244103e-07, + "loss": 0.5793, + "step": 16937 + }, + { + "epoch": 0.6989353800445655, + "grad_norm": 3.538656054518108, + "learning_rate": 6.583910635710946e-07, + "loss": 0.4758, + "step": 16938 + }, + { + "epoch": 0.6989766443839234, + "grad_norm": 4.7674802192600785, + "learning_rate": 6.582251237510204e-07, + "loss": 0.4882, + "step": 16939 + }, + { + "epoch": 0.6990179087232813, + "grad_norm": 4.0783821883703615, + "learning_rate": 6.58059198967151e-07, + "loss": 0.4607, + "step": 16940 + }, + { + "epoch": 0.6990591730626393, + "grad_norm": 2.7918580387831002, + "learning_rate": 6.578932892224503e-07, + "loss": 0.5429, + "step": 16941 + }, + { + "epoch": 0.6991004374019972, + 
"grad_norm": 3.573639982846891, + "learning_rate": 6.577273945198827e-07, + "loss": 0.5749, + "step": 16942 + }, + { + "epoch": 0.6991417017413551, + "grad_norm": 3.0908783849118864, + "learning_rate": 6.575615148624103e-07, + "loss": 0.5477, + "step": 16943 + }, + { + "epoch": 0.6991829660807131, + "grad_norm": 2.765562036876643, + "learning_rate": 6.573956502529968e-07, + "loss": 0.533, + "step": 16944 + }, + { + "epoch": 0.699224230420071, + "grad_norm": 3.0580960656943867, + "learning_rate": 6.572298006946054e-07, + "loss": 0.5275, + "step": 16945 + }, + { + "epoch": 0.6992654947594289, + "grad_norm": 7.435854216008789, + "learning_rate": 6.570639661901974e-07, + "loss": 0.5481, + "step": 16946 + }, + { + "epoch": 0.6993067590987868, + "grad_norm": 4.310919843348248, + "learning_rate": 6.568981467427363e-07, + "loss": 0.4916, + "step": 16947 + }, + { + "epoch": 0.6993480234381447, + "grad_norm": 2.692276780900017, + "learning_rate": 6.567323423551828e-07, + "loss": 0.4342, + "step": 16948 + }, + { + "epoch": 0.6993892877775026, + "grad_norm": 2.853579952957297, + "learning_rate": 6.565665530304993e-07, + "loss": 0.516, + "step": 16949 + }, + { + "epoch": 0.6994305521168606, + "grad_norm": 3.8540097452022986, + "learning_rate": 6.56400778771647e-07, + "loss": 0.4565, + "step": 16950 + }, + { + "epoch": 0.6994718164562186, + "grad_norm": 3.367376335106777, + "learning_rate": 6.562350195815878e-07, + "loss": 0.5306, + "step": 16951 + }, + { + "epoch": 0.6995130807955765, + "grad_norm": 3.073289917645681, + "learning_rate": 6.560692754632812e-07, + "loss": 0.5078, + "step": 16952 + }, + { + "epoch": 0.6995543451349344, + "grad_norm": 15.606233619395896, + "learning_rate": 6.559035464196886e-07, + "loss": 0.5257, + "step": 16953 + }, + { + "epoch": 0.6995956094742923, + "grad_norm": 2.859903145026382, + "learning_rate": 6.557378324537703e-07, + "loss": 0.5074, + "step": 16954 + }, + { + "epoch": 0.6996368738136503, + "grad_norm": 2.974777345584717, + "learning_rate": 6.55572133568487e-07, + "loss": 0.5277, + "step": 16955 + }, + { + "epoch": 0.6996781381530082, + "grad_norm": 3.153965568072695, + "learning_rate": 6.554064497667971e-07, + "loss": 0.5452, + "step": 16956 + }, + { + "epoch": 0.6997194024923661, + "grad_norm": 4.821858072399169, + "learning_rate": 6.552407810516611e-07, + "loss": 0.4452, + "step": 16957 + }, + { + "epoch": 0.699760666831724, + "grad_norm": 12.871405709714104, + "learning_rate": 6.550751274260384e-07, + "loss": 0.4908, + "step": 16958 + }, + { + "epoch": 0.6998019311710819, + "grad_norm": 2.4915770844420146, + "learning_rate": 6.549094888928875e-07, + "loss": 0.5037, + "step": 16959 + }, + { + "epoch": 0.6998431955104398, + "grad_norm": 3.1404182114516157, + "learning_rate": 6.547438654551667e-07, + "loss": 0.5375, + "step": 16960 + }, + { + "epoch": 0.6998844598497979, + "grad_norm": 16.01982606751601, + "learning_rate": 6.545782571158351e-07, + "loss": 0.5207, + "step": 16961 + }, + { + "epoch": 0.6999257241891558, + "grad_norm": 3.705346513517861, + "learning_rate": 6.544126638778508e-07, + "loss": 0.5136, + "step": 16962 + }, + { + "epoch": 0.6999669885285137, + "grad_norm": 2.815389478139974, + "learning_rate": 6.542470857441721e-07, + "loss": 0.5091, + "step": 16963 + }, + { + "epoch": 0.7000082528678716, + "grad_norm": 1.8865329647169105, + "learning_rate": 6.540815227177555e-07, + "loss": 0.4548, + "step": 16964 + }, + { + "epoch": 0.7000495172072295, + "grad_norm": 5.151073417305585, + "learning_rate": 6.539159748015594e-07, + "loss": 0.5386, + "step": 
16965 + }, + { + "epoch": 0.7000907815465874, + "grad_norm": 18.470163077681793, + "learning_rate": 6.537504419985402e-07, + "loss": 0.5385, + "step": 16966 + }, + { + "epoch": 0.7001320458859454, + "grad_norm": 2.716717915665329, + "learning_rate": 6.53584924311656e-07, + "loss": 0.4885, + "step": 16967 + }, + { + "epoch": 0.7001733102253033, + "grad_norm": 8.43509978121427, + "learning_rate": 6.534194217438617e-07, + "loss": 0.5679, + "step": 16968 + }, + { + "epoch": 0.7002145745646612, + "grad_norm": 22.991666415651615, + "learning_rate": 6.532539342981143e-07, + "loss": 0.4889, + "step": 16969 + }, + { + "epoch": 0.7002558389040191, + "grad_norm": 6.505018559373204, + "learning_rate": 6.5308846197737e-07, + "loss": 0.4922, + "step": 16970 + }, + { + "epoch": 0.7002971032433771, + "grad_norm": 8.737298455800017, + "learning_rate": 6.52923004784585e-07, + "loss": 0.5104, + "step": 16971 + }, + { + "epoch": 0.700338367582735, + "grad_norm": 3.7459735215459364, + "learning_rate": 6.52757562722714e-07, + "loss": 0.4593, + "step": 16972 + }, + { + "epoch": 0.700379631922093, + "grad_norm": 4.31233598868309, + "learning_rate": 6.525921357947119e-07, + "loss": 0.5071, + "step": 16973 + }, + { + "epoch": 0.7004208962614509, + "grad_norm": 27.624646529038575, + "learning_rate": 6.524267240035344e-07, + "loss": 0.5767, + "step": 16974 + }, + { + "epoch": 0.7004621606008088, + "grad_norm": 2.5631953458581753, + "learning_rate": 6.522613273521357e-07, + "loss": 0.5272, + "step": 16975 + }, + { + "epoch": 0.7005034249401667, + "grad_norm": 4.201337942112112, + "learning_rate": 6.520959458434709e-07, + "loss": 0.5209, + "step": 16976 + }, + { + "epoch": 0.7005446892795246, + "grad_norm": 2.5691712315103805, + "learning_rate": 6.519305794804933e-07, + "loss": 0.5176, + "step": 16977 + }, + { + "epoch": 0.7005859536188825, + "grad_norm": 2.1427761880390666, + "learning_rate": 6.517652282661569e-07, + "loss": 0.5303, + "step": 16978 + }, + { + "epoch": 0.7006272179582405, + "grad_norm": 6.840508703603024, + "learning_rate": 6.515998922034156e-07, + "loss": 0.5531, + "step": 16979 + }, + { + "epoch": 0.7006684822975984, + "grad_norm": 2.596105809699905, + "learning_rate": 6.514345712952231e-07, + "loss": 0.5647, + "step": 16980 + }, + { + "epoch": 0.7007097466369563, + "grad_norm": 5.162856396630248, + "learning_rate": 6.512692655445315e-07, + "loss": 0.5081, + "step": 16981 + }, + { + "epoch": 0.7007510109763143, + "grad_norm": 8.139218555551201, + "learning_rate": 6.511039749542938e-07, + "loss": 0.5375, + "step": 16982 + }, + { + "epoch": 0.7007922753156722, + "grad_norm": 1.827406518953582, + "learning_rate": 6.509386995274634e-07, + "loss": 0.4733, + "step": 16983 + }, + { + "epoch": 0.7008335396550301, + "grad_norm": 5.537500388743283, + "learning_rate": 6.507734392669918e-07, + "loss": 0.5021, + "step": 16984 + }, + { + "epoch": 0.7008748039943881, + "grad_norm": 2.5982472052254146, + "learning_rate": 6.506081941758305e-07, + "loss": 0.4822, + "step": 16985 + }, + { + "epoch": 0.700916068333746, + "grad_norm": 2.88619073691893, + "learning_rate": 6.504429642569318e-07, + "loss": 0.5226, + "step": 16986 + }, + { + "epoch": 0.7009573326731039, + "grad_norm": 2.3456482847822513, + "learning_rate": 6.502777495132469e-07, + "loss": 0.4722, + "step": 16987 + }, + { + "epoch": 0.7009985970124618, + "grad_norm": 4.7553131399086785, + "learning_rate": 6.501125499477276e-07, + "loss": 0.5142, + "step": 16988 + }, + { + "epoch": 0.7010398613518197, + "grad_norm": 2.8899165183710456, + "learning_rate": 
6.499473655633237e-07, + "loss": 0.4871, + "step": 16989 + }, + { + "epoch": 0.7010811256911776, + "grad_norm": 2.5083613824246207, + "learning_rate": 6.497821963629866e-07, + "loss": 0.5403, + "step": 16990 + }, + { + "epoch": 0.7011223900305356, + "grad_norm": 2.4094012311683795, + "learning_rate": 6.496170423496661e-07, + "loss": 0.529, + "step": 16991 + }, + { + "epoch": 0.7011636543698936, + "grad_norm": 2.458568976032467, + "learning_rate": 6.494519035263131e-07, + "loss": 0.495, + "step": 16992 + }, + { + "epoch": 0.7012049187092515, + "grad_norm": 4.321351145852376, + "learning_rate": 6.492867798958765e-07, + "loss": 0.5139, + "step": 16993 + }, + { + "epoch": 0.7012461830486094, + "grad_norm": 2.7329281800206005, + "learning_rate": 6.491216714613061e-07, + "loss": 0.4625, + "step": 16994 + }, + { + "epoch": 0.7012874473879673, + "grad_norm": 2.3702959444887863, + "learning_rate": 6.489565782255513e-07, + "loss": 0.5042, + "step": 16995 + }, + { + "epoch": 0.7013287117273252, + "grad_norm": 2.9023630917141925, + "learning_rate": 6.487915001915614e-07, + "loss": 0.5618, + "step": 16996 + }, + { + "epoch": 0.7013699760666832, + "grad_norm": 2.3184869582799976, + "learning_rate": 6.486264373622847e-07, + "loss": 0.5109, + "step": 16997 + }, + { + "epoch": 0.7014112404060411, + "grad_norm": 3.057231548656891, + "learning_rate": 6.484613897406691e-07, + "loss": 0.4956, + "step": 16998 + }, + { + "epoch": 0.701452504745399, + "grad_norm": 3.0291579935497097, + "learning_rate": 6.482963573296633e-07, + "loss": 0.5339, + "step": 16999 + }, + { + "epoch": 0.7014937690847569, + "grad_norm": 3.9164407959023153, + "learning_rate": 6.481313401322158e-07, + "loss": 0.5276, + "step": 17000 + }, + { + "epoch": 0.7015350334241148, + "grad_norm": 2.4315068613353334, + "learning_rate": 6.479663381512732e-07, + "loss": 0.5262, + "step": 17001 + }, + { + "epoch": 0.7015762977634729, + "grad_norm": 4.125472109242227, + "learning_rate": 6.478013513897831e-07, + "loss": 0.5438, + "step": 17002 + }, + { + "epoch": 0.7016175621028308, + "grad_norm": 2.5370358699386952, + "learning_rate": 6.476363798506928e-07, + "loss": 0.5694, + "step": 17003 + }, + { + "epoch": 0.7016588264421887, + "grad_norm": 3.029427893343508, + "learning_rate": 6.474714235369495e-07, + "loss": 0.4829, + "step": 17004 + }, + { + "epoch": 0.7017000907815466, + "grad_norm": 3.850821186887771, + "learning_rate": 6.473064824514988e-07, + "loss": 0.5092, + "step": 17005 + }, + { + "epoch": 0.7017413551209045, + "grad_norm": 2.2259106154188273, + "learning_rate": 6.471415565972874e-07, + "loss": 0.4355, + "step": 17006 + }, + { + "epoch": 0.7017826194602624, + "grad_norm": 5.177204888799702, + "learning_rate": 6.469766459772612e-07, + "loss": 0.5129, + "step": 17007 + }, + { + "epoch": 0.7018238837996204, + "grad_norm": 2.0674543491347768, + "learning_rate": 6.468117505943668e-07, + "loss": 0.4867, + "step": 17008 + }, + { + "epoch": 0.7018651481389783, + "grad_norm": 6.418640300685209, + "learning_rate": 6.466468704515481e-07, + "loss": 0.5488, + "step": 17009 + }, + { + "epoch": 0.7019064124783362, + "grad_norm": 2.2505476137767935, + "learning_rate": 6.46482005551752e-07, + "loss": 0.5005, + "step": 17010 + }, + { + "epoch": 0.7019476768176941, + "grad_norm": 2.6724827605109613, + "learning_rate": 6.463171558979216e-07, + "loss": 0.4935, + "step": 17011 + }, + { + "epoch": 0.7019889411570521, + "grad_norm": 3.1938740030089843, + "learning_rate": 6.461523214930025e-07, + "loss": 0.5094, + "step": 17012 + }, + { + "epoch": 
0.70203020549641, + "grad_norm": 4.118425482360406, + "learning_rate": 6.459875023399393e-07, + "loss": 0.5103, + "step": 17013 + }, + { + "epoch": 0.702071469835768, + "grad_norm": 2.3534104411229264, + "learning_rate": 6.458226984416754e-07, + "loss": 0.4753, + "step": 17014 + }, + { + "epoch": 0.7021127341751259, + "grad_norm": 3.1648558100491493, + "learning_rate": 6.456579098011548e-07, + "loss": 0.5148, + "step": 17015 + }, + { + "epoch": 0.7021539985144838, + "grad_norm": 2.660087851675702, + "learning_rate": 6.454931364213212e-07, + "loss": 0.5738, + "step": 17016 + }, + { + "epoch": 0.7021952628538417, + "grad_norm": 2.188499387224141, + "learning_rate": 6.453283783051183e-07, + "loss": 0.5369, + "step": 17017 + }, + { + "epoch": 0.7022365271931996, + "grad_norm": 7.20317058592533, + "learning_rate": 6.451636354554883e-07, + "loss": 0.5435, + "step": 17018 + }, + { + "epoch": 0.7022777915325575, + "grad_norm": 2.47266041723036, + "learning_rate": 6.449989078753741e-07, + "loss": 0.5049, + "step": 17019 + }, + { + "epoch": 0.7023190558719155, + "grad_norm": 2.778695277303414, + "learning_rate": 6.448341955677188e-07, + "loss": 0.5422, + "step": 17020 + }, + { + "epoch": 0.7023603202112734, + "grad_norm": 2.246062685629918, + "learning_rate": 6.446694985354636e-07, + "loss": 0.5125, + "step": 17021 + }, + { + "epoch": 0.7024015845506314, + "grad_norm": 4.230737401540564, + "learning_rate": 6.445048167815508e-07, + "loss": 0.5337, + "step": 17022 + }, + { + "epoch": 0.7024428488899893, + "grad_norm": 5.123600488400068, + "learning_rate": 6.443401503089221e-07, + "loss": 0.5193, + "step": 17023 + }, + { + "epoch": 0.7024841132293472, + "grad_norm": 2.111300860991904, + "learning_rate": 6.441754991205194e-07, + "loss": 0.4485, + "step": 17024 + }, + { + "epoch": 0.7025253775687051, + "grad_norm": 3.6695335415208543, + "learning_rate": 6.440108632192833e-07, + "loss": 0.5236, + "step": 17025 + }, + { + "epoch": 0.7025666419080631, + "grad_norm": 2.151730670811168, + "learning_rate": 6.438462426081539e-07, + "loss": 0.497, + "step": 17026 + }, + { + "epoch": 0.702607906247421, + "grad_norm": 3.254237732490447, + "learning_rate": 6.436816372900724e-07, + "loss": 0.5146, + "step": 17027 + }, + { + "epoch": 0.7026491705867789, + "grad_norm": 2.485201617789024, + "learning_rate": 6.43517047267979e-07, + "loss": 0.5101, + "step": 17028 + }, + { + "epoch": 0.7026904349261368, + "grad_norm": 4.621205353638363, + "learning_rate": 6.433524725448144e-07, + "loss": 0.5305, + "step": 17029 + }, + { + "epoch": 0.7027316992654947, + "grad_norm": 3.3241718926488764, + "learning_rate": 6.431879131235171e-07, + "loss": 0.534, + "step": 17030 + }, + { + "epoch": 0.7027729636048526, + "grad_norm": 2.1959357592848603, + "learning_rate": 6.43023369007027e-07, + "loss": 0.526, + "step": 17031 + }, + { + "epoch": 0.7028142279442107, + "grad_norm": 17.365432298569562, + "learning_rate": 6.428588401982834e-07, + "loss": 0.5058, + "step": 17032 + }, + { + "epoch": 0.7028554922835686, + "grad_norm": 4.055694153284294, + "learning_rate": 6.426943267002256e-07, + "loss": 0.5332, + "step": 17033 + }, + { + "epoch": 0.7028967566229265, + "grad_norm": 3.9888730008397304, + "learning_rate": 6.425298285157913e-07, + "loss": 0.57, + "step": 17034 + }, + { + "epoch": 0.7029380209622844, + "grad_norm": 1.9466335516292148, + "learning_rate": 6.423653456479193e-07, + "loss": 0.5086, + "step": 17035 + }, + { + "epoch": 0.7029792853016423, + "grad_norm": 3.2689165460547445, + "learning_rate": 6.422008780995478e-07, + "loss": 
0.5245, + "step": 17036 + }, + { + "epoch": 0.7030205496410002, + "grad_norm": 2.9652718205562585, + "learning_rate": 6.420364258736155e-07, + "loss": 0.4637, + "step": 17037 + }, + { + "epoch": 0.7030618139803582, + "grad_norm": 2.512917024215428, + "learning_rate": 6.418719889730579e-07, + "loss": 0.554, + "step": 17038 + }, + { + "epoch": 0.7031030783197161, + "grad_norm": 4.698561286084771, + "learning_rate": 6.417075674008131e-07, + "loss": 0.5813, + "step": 17039 + }, + { + "epoch": 0.703144342659074, + "grad_norm": 3.5521292241083224, + "learning_rate": 6.415431611598185e-07, + "loss": 0.5259, + "step": 17040 + }, + { + "epoch": 0.7031856069984319, + "grad_norm": 3.970070724927929, + "learning_rate": 6.413787702530109e-07, + "loss": 0.5075, + "step": 17041 + }, + { + "epoch": 0.7032268713377898, + "grad_norm": 9.03956820521785, + "learning_rate": 6.41214394683326e-07, + "loss": 0.5404, + "step": 17042 + }, + { + "epoch": 0.7032681356771479, + "grad_norm": 3.1116881645838332, + "learning_rate": 6.410500344537004e-07, + "loss": 0.5675, + "step": 17043 + }, + { + "epoch": 0.7033094000165058, + "grad_norm": 6.016290654507332, + "learning_rate": 6.408856895670699e-07, + "loss": 0.4947, + "step": 17044 + }, + { + "epoch": 0.7033506643558637, + "grad_norm": 3.856920518672152, + "learning_rate": 6.407213600263707e-07, + "loss": 0.513, + "step": 17045 + }, + { + "epoch": 0.7033919286952216, + "grad_norm": 2.7206557559059386, + "learning_rate": 6.405570458345371e-07, + "loss": 0.4287, + "step": 17046 + }, + { + "epoch": 0.7034331930345795, + "grad_norm": 2.660166689784395, + "learning_rate": 6.403927469945045e-07, + "loss": 0.4889, + "step": 17047 + }, + { + "epoch": 0.7034744573739374, + "grad_norm": 2.6122807224871627, + "learning_rate": 6.402284635092081e-07, + "loss": 0.5301, + "step": 17048 + }, + { + "epoch": 0.7035157217132953, + "grad_norm": 4.821909876016631, + "learning_rate": 6.400641953815825e-07, + "loss": 0.543, + "step": 17049 + }, + { + "epoch": 0.7035569860526533, + "grad_norm": 5.291809235550785, + "learning_rate": 6.398999426145616e-07, + "loss": 0.5501, + "step": 17050 + }, + { + "epoch": 0.7035982503920112, + "grad_norm": 5.877915758139426, + "learning_rate": 6.397357052110789e-07, + "loss": 0.5014, + "step": 17051 + }, + { + "epoch": 0.7036395147313691, + "grad_norm": 3.1138867918528326, + "learning_rate": 6.395714831740685e-07, + "loss": 0.515, + "step": 17052 + }, + { + "epoch": 0.7036807790707271, + "grad_norm": 6.687281353672435, + "learning_rate": 6.394072765064639e-07, + "loss": 0.516, + "step": 17053 + }, + { + "epoch": 0.703722043410085, + "grad_norm": 13.51474792412491, + "learning_rate": 6.392430852111988e-07, + "loss": 0.5149, + "step": 17054 + }, + { + "epoch": 0.703763307749443, + "grad_norm": 7.412925180207138, + "learning_rate": 6.390789092912048e-07, + "loss": 0.5579, + "step": 17055 + }, + { + "epoch": 0.7038045720888009, + "grad_norm": 3.032381123614146, + "learning_rate": 6.389147487494153e-07, + "loss": 0.4936, + "step": 17056 + }, + { + "epoch": 0.7038458364281588, + "grad_norm": 4.843955500517753, + "learning_rate": 6.387506035887625e-07, + "loss": 0.5198, + "step": 17057 + }, + { + "epoch": 0.7038871007675167, + "grad_norm": 2.85409086148964, + "learning_rate": 6.38586473812179e-07, + "loss": 0.5048, + "step": 17058 + }, + { + "epoch": 0.7039283651068746, + "grad_norm": 4.323060551575505, + "learning_rate": 6.384223594225954e-07, + "loss": 0.5459, + "step": 17059 + }, + { + "epoch": 0.7039696294462325, + "grad_norm": 2.4565268420465842, + 
"learning_rate": 6.382582604229439e-07, + "loss": 0.4277, + "step": 17060 + }, + { + "epoch": 0.7040108937855905, + "grad_norm": 3.0469983881702047, + "learning_rate": 6.380941768161561e-07, + "loss": 0.4904, + "step": 17061 + }, + { + "epoch": 0.7040521581249484, + "grad_norm": 7.751876758879263, + "learning_rate": 6.379301086051618e-07, + "loss": 0.5082, + "step": 17062 + }, + { + "epoch": 0.7040934224643064, + "grad_norm": 9.8658971994557, + "learning_rate": 6.377660557928929e-07, + "loss": 0.4946, + "step": 17063 + }, + { + "epoch": 0.7041346868036643, + "grad_norm": 2.643043529137907, + "learning_rate": 6.376020183822787e-07, + "loss": 0.5079, + "step": 17064 + }, + { + "epoch": 0.7041759511430222, + "grad_norm": 2.7822310396991488, + "learning_rate": 6.374379963762499e-07, + "loss": 0.559, + "step": 17065 + }, + { + "epoch": 0.7042172154823801, + "grad_norm": 2.084984595858799, + "learning_rate": 6.37273989777737e-07, + "loss": 0.5053, + "step": 17066 + }, + { + "epoch": 0.7042584798217381, + "grad_norm": 4.144455191304554, + "learning_rate": 6.37109998589668e-07, + "loss": 0.4809, + "step": 17067 + }, + { + "epoch": 0.704299744161096, + "grad_norm": 3.7205300731414375, + "learning_rate": 6.369460228149731e-07, + "loss": 0.5149, + "step": 17068 + }, + { + "epoch": 0.7043410085004539, + "grad_norm": 4.748306058887769, + "learning_rate": 6.367820624565814e-07, + "loss": 0.5453, + "step": 17069 + }, + { + "epoch": 0.7043822728398118, + "grad_norm": 3.3774685140716842, + "learning_rate": 6.36618117517422e-07, + "loss": 0.5321, + "step": 17070 + }, + { + "epoch": 0.7044235371791697, + "grad_norm": 3.37707217748255, + "learning_rate": 6.364541880004224e-07, + "loss": 0.5458, + "step": 17071 + }, + { + "epoch": 0.7044648015185276, + "grad_norm": 14.445556329676117, + "learning_rate": 6.36290273908511e-07, + "loss": 0.5284, + "step": 17072 + }, + { + "epoch": 0.7045060658578857, + "grad_norm": 13.358518772096366, + "learning_rate": 6.361263752446162e-07, + "loss": 0.6049, + "step": 17073 + }, + { + "epoch": 0.7045473301972436, + "grad_norm": 9.266691647984613, + "learning_rate": 6.359624920116659e-07, + "loss": 0.4975, + "step": 17074 + }, + { + "epoch": 0.7045885945366015, + "grad_norm": 3.3792929687761357, + "learning_rate": 6.357986242125864e-07, + "loss": 0.4665, + "step": 17075 + }, + { + "epoch": 0.7046298588759594, + "grad_norm": 3.067918002301056, + "learning_rate": 6.35634771850306e-07, + "loss": 0.4543, + "step": 17076 + }, + { + "epoch": 0.7046711232153173, + "grad_norm": 4.170765870332371, + "learning_rate": 6.354709349277503e-07, + "loss": 0.5243, + "step": 17077 + }, + { + "epoch": 0.7047123875546752, + "grad_norm": 1.8689807529507478, + "learning_rate": 6.353071134478469e-07, + "loss": 0.5133, + "step": 17078 + }, + { + "epoch": 0.7047536518940332, + "grad_norm": 3.2128273645441148, + "learning_rate": 6.351433074135212e-07, + "loss": 0.5334, + "step": 17079 + }, + { + "epoch": 0.7047949162333911, + "grad_norm": 2.2649388216031787, + "learning_rate": 6.349795168276994e-07, + "loss": 0.4912, + "step": 17080 + }, + { + "epoch": 0.704836180572749, + "grad_norm": 3.3254850064321313, + "learning_rate": 6.348157416933076e-07, + "loss": 0.5587, + "step": 17081 + }, + { + "epoch": 0.7048774449121069, + "grad_norm": 3.901632447534059, + "learning_rate": 6.346519820132714e-07, + "loss": 0.4591, + "step": 17082 + }, + { + "epoch": 0.7049187092514649, + "grad_norm": 7.340674255410581, + "learning_rate": 6.34488237790515e-07, + "loss": 0.5736, + "step": 17083 + }, + { + "epoch": 
0.7049599735908229, + "grad_norm": 18.476003445117154, + "learning_rate": 6.343245090279639e-07, + "loss": 0.4856, + "step": 17084 + }, + { + "epoch": 0.7050012379301808, + "grad_norm": 2.4429318410859717, + "learning_rate": 6.341607957285425e-07, + "loss": 0.5381, + "step": 17085 + }, + { + "epoch": 0.7050425022695387, + "grad_norm": 6.629346252461505, + "learning_rate": 6.339970978951762e-07, + "loss": 0.5215, + "step": 17086 + }, + { + "epoch": 0.7050837666088966, + "grad_norm": 2.496750537826101, + "learning_rate": 6.338334155307872e-07, + "loss": 0.5191, + "step": 17087 + }, + { + "epoch": 0.7051250309482545, + "grad_norm": 5.88209991007622, + "learning_rate": 6.336697486383005e-07, + "loss": 0.4947, + "step": 17088 + }, + { + "epoch": 0.7051662952876124, + "grad_norm": 6.747219908614114, + "learning_rate": 6.335060972206397e-07, + "loss": 0.574, + "step": 17089 + }, + { + "epoch": 0.7052075596269703, + "grad_norm": 3.518406806119274, + "learning_rate": 6.333424612807272e-07, + "loss": 0.4828, + "step": 17090 + }, + { + "epoch": 0.7052488239663283, + "grad_norm": 24.327443736109835, + "learning_rate": 6.331788408214869e-07, + "loss": 0.5037, + "step": 17091 + }, + { + "epoch": 0.7052900883056862, + "grad_norm": 2.5658298840006815, + "learning_rate": 6.330152358458403e-07, + "loss": 0.4773, + "step": 17092 + }, + { + "epoch": 0.7053313526450442, + "grad_norm": 3.08126511745209, + "learning_rate": 6.328516463567106e-07, + "loss": 0.5476, + "step": 17093 + }, + { + "epoch": 0.7053726169844021, + "grad_norm": 7.801428485239671, + "learning_rate": 6.326880723570197e-07, + "loss": 0.5229, + "step": 17094 + }, + { + "epoch": 0.70541388132376, + "grad_norm": 3.6040355771435664, + "learning_rate": 6.325245138496902e-07, + "loss": 0.4832, + "step": 17095 + }, + { + "epoch": 0.705455145663118, + "grad_norm": 3.6655859321292126, + "learning_rate": 6.323609708376422e-07, + "loss": 0.594, + "step": 17096 + }, + { + "epoch": 0.7054964100024759, + "grad_norm": 4.326005762140209, + "learning_rate": 6.321974433237977e-07, + "loss": 0.4949, + "step": 17097 + }, + { + "epoch": 0.7055376743418338, + "grad_norm": 6.029803930897545, + "learning_rate": 6.320339313110786e-07, + "loss": 0.5019, + "step": 17098 + }, + { + "epoch": 0.7055789386811917, + "grad_norm": 2.3660642104032217, + "learning_rate": 6.318704348024041e-07, + "loss": 0.5225, + "step": 17099 + }, + { + "epoch": 0.7056202030205496, + "grad_norm": 2.6278187399041997, + "learning_rate": 6.317069538006955e-07, + "loss": 0.5199, + "step": 17100 + }, + { + "epoch": 0.7056614673599075, + "grad_norm": 3.0619936726031853, + "learning_rate": 6.315434883088726e-07, + "loss": 0.4926, + "step": 17101 + }, + { + "epoch": 0.7057027316992655, + "grad_norm": 2.786020132052203, + "learning_rate": 6.313800383298562e-07, + "loss": 0.5469, + "step": 17102 + }, + { + "epoch": 0.7057439960386234, + "grad_norm": 2.8098238603545393, + "learning_rate": 6.312166038665654e-07, + "loss": 0.5245, + "step": 17103 + }, + { + "epoch": 0.7057852603779814, + "grad_norm": 2.6974355564867913, + "learning_rate": 6.310531849219188e-07, + "loss": 0.47, + "step": 17104 + }, + { + "epoch": 0.7058265247173393, + "grad_norm": 36.83119594320831, + "learning_rate": 6.308897814988361e-07, + "loss": 0.5457, + "step": 17105 + }, + { + "epoch": 0.7058677890566972, + "grad_norm": 3.300637666661735, + "learning_rate": 6.307263936002359e-07, + "loss": 0.5238, + "step": 17106 + }, + { + "epoch": 0.7059090533960551, + "grad_norm": 2.327822503550994, + "learning_rate": 6.305630212290377e-07, + 
"loss": 0.472, + "step": 17107 + }, + { + "epoch": 0.7059503177354131, + "grad_norm": 2.9693616375363274, + "learning_rate": 6.303996643881582e-07, + "loss": 0.5258, + "step": 17108 + }, + { + "epoch": 0.705991582074771, + "grad_norm": 3.7376641787673237, + "learning_rate": 6.302363230805163e-07, + "loss": 0.5244, + "step": 17109 + }, + { + "epoch": 0.7060328464141289, + "grad_norm": 146.10649013650956, + "learning_rate": 6.300729973090294e-07, + "loss": 0.5435, + "step": 17110 + }, + { + "epoch": 0.7060741107534868, + "grad_norm": 3.192799279965708, + "learning_rate": 6.299096870766155e-07, + "loss": 0.4945, + "step": 17111 + }, + { + "epoch": 0.7061153750928447, + "grad_norm": 246.1199445466562, + "learning_rate": 6.297463923861906e-07, + "loss": 0.5577, + "step": 17112 + }, + { + "epoch": 0.7061566394322026, + "grad_norm": 2.5657562712793958, + "learning_rate": 6.295831132406723e-07, + "loss": 0.5526, + "step": 17113 + }, + { + "epoch": 0.7061979037715607, + "grad_norm": 2.819280212496382, + "learning_rate": 6.29419849642977e-07, + "loss": 0.5358, + "step": 17114 + }, + { + "epoch": 0.7062391681109186, + "grad_norm": 3.236157676501125, + "learning_rate": 6.292566015960215e-07, + "loss": 0.5627, + "step": 17115 + }, + { + "epoch": 0.7062804324502765, + "grad_norm": 3.342645496843154, + "learning_rate": 6.290933691027214e-07, + "loss": 0.4876, + "step": 17116 + }, + { + "epoch": 0.7063216967896344, + "grad_norm": 3.0251258415413185, + "learning_rate": 6.28930152165992e-07, + "loss": 0.5224, + "step": 17117 + }, + { + "epoch": 0.7063629611289923, + "grad_norm": 2.6699081322526457, + "learning_rate": 6.28766950788749e-07, + "loss": 0.4887, + "step": 17118 + }, + { + "epoch": 0.7064042254683502, + "grad_norm": 5.697758716964184, + "learning_rate": 6.286037649739083e-07, + "loss": 0.4594, + "step": 17119 + }, + { + "epoch": 0.7064454898077082, + "grad_norm": 2.442390405628235, + "learning_rate": 6.284405947243839e-07, + "loss": 0.517, + "step": 17120 + }, + { + "epoch": 0.7064867541470661, + "grad_norm": 2.832307695679355, + "learning_rate": 6.282774400430907e-07, + "loss": 0.5478, + "step": 17121 + }, + { + "epoch": 0.706528018486424, + "grad_norm": 22.01553157615833, + "learning_rate": 6.28114300932943e-07, + "loss": 0.4494, + "step": 17122 + }, + { + "epoch": 0.7065692828257819, + "grad_norm": 2.119019226439298, + "learning_rate": 6.279511773968557e-07, + "loss": 0.5414, + "step": 17123 + }, + { + "epoch": 0.7066105471651399, + "grad_norm": 6.624955492062685, + "learning_rate": 6.277880694377414e-07, + "loss": 0.442, + "step": 17124 + }, + { + "epoch": 0.7066518115044979, + "grad_norm": 2.9785177098317455, + "learning_rate": 6.27624977058514e-07, + "loss": 0.5202, + "step": 17125 + }, + { + "epoch": 0.7066930758438558, + "grad_norm": 2.755071103683871, + "learning_rate": 6.274619002620868e-07, + "loss": 0.513, + "step": 17126 + }, + { + "epoch": 0.7067343401832137, + "grad_norm": 4.661428809451791, + "learning_rate": 6.272988390513734e-07, + "loss": 0.5157, + "step": 17127 + }, + { + "epoch": 0.7067756045225716, + "grad_norm": 2.835241017596032, + "learning_rate": 6.271357934292855e-07, + "loss": 0.5497, + "step": 17128 + }, + { + "epoch": 0.7068168688619295, + "grad_norm": 2.865565600117001, + "learning_rate": 6.269727633987364e-07, + "loss": 0.5331, + "step": 17129 + }, + { + "epoch": 0.7068581332012874, + "grad_norm": 7.460595251610709, + "learning_rate": 6.268097489626371e-07, + "loss": 0.5355, + "step": 17130 + }, + { + "epoch": 0.7068993975406453, + "grad_norm": 4.239764238139804, + 
"learning_rate": 6.266467501239001e-07, + "loss": 0.5273, + "step": 17131 + }, + { + "epoch": 0.7069406618800033, + "grad_norm": 2.5114077712545813, + "learning_rate": 6.264837668854376e-07, + "loss": 0.4304, + "step": 17132 + }, + { + "epoch": 0.7069819262193612, + "grad_norm": 6.077493883082151, + "learning_rate": 6.263207992501595e-07, + "loss": 0.4365, + "step": 17133 + }, + { + "epoch": 0.7070231905587192, + "grad_norm": 3.365719689689399, + "learning_rate": 6.261578472209776e-07, + "loss": 0.5067, + "step": 17134 + }, + { + "epoch": 0.7070644548980771, + "grad_norm": 7.040728274251566, + "learning_rate": 6.259949108008026e-07, + "loss": 0.5172, + "step": 17135 + }, + { + "epoch": 0.707105719237435, + "grad_norm": 3.404545928694749, + "learning_rate": 6.258319899925453e-07, + "loss": 0.4749, + "step": 17136 + }, + { + "epoch": 0.707146983576793, + "grad_norm": 2.13943513196081, + "learning_rate": 6.256690847991151e-07, + "loss": 0.5443, + "step": 17137 + }, + { + "epoch": 0.7071882479161509, + "grad_norm": 7.19433275924279, + "learning_rate": 6.255061952234223e-07, + "loss": 0.4668, + "step": 17138 + }, + { + "epoch": 0.7072295122555088, + "grad_norm": 6.805467454345086, + "learning_rate": 6.25343321268377e-07, + "loss": 0.5325, + "step": 17139 + }, + { + "epoch": 0.7072707765948667, + "grad_norm": 2.773301082060474, + "learning_rate": 6.251804629368876e-07, + "loss": 0.5572, + "step": 17140 + }, + { + "epoch": 0.7073120409342246, + "grad_norm": 2.0366219355889834, + "learning_rate": 6.250176202318635e-07, + "loss": 0.5155, + "step": 17141 + }, + { + "epoch": 0.7073533052735825, + "grad_norm": 2.0299361107078124, + "learning_rate": 6.248547931562141e-07, + "loss": 0.5002, + "step": 17142 + }, + { + "epoch": 0.7073945696129404, + "grad_norm": 3.414537733677815, + "learning_rate": 6.246919817128469e-07, + "loss": 0.5373, + "step": 17143 + }, + { + "epoch": 0.7074358339522985, + "grad_norm": 2.5198212071942234, + "learning_rate": 6.245291859046711e-07, + "loss": 0.5327, + "step": 17144 + }, + { + "epoch": 0.7074770982916564, + "grad_norm": 3.4807287405764002, + "learning_rate": 6.243664057345936e-07, + "loss": 0.5006, + "step": 17145 + }, + { + "epoch": 0.7075183626310143, + "grad_norm": 13.352069376321436, + "learning_rate": 6.242036412055224e-07, + "loss": 0.5216, + "step": 17146 + }, + { + "epoch": 0.7075596269703722, + "grad_norm": 3.590269339614306, + "learning_rate": 6.240408923203653e-07, + "loss": 0.516, + "step": 17147 + }, + { + "epoch": 0.7076008913097301, + "grad_norm": 3.002681916663015, + "learning_rate": 6.238781590820297e-07, + "loss": 0.4559, + "step": 17148 + }, + { + "epoch": 0.7076421556490881, + "grad_norm": 2.6105433905925315, + "learning_rate": 6.237154414934213e-07, + "loss": 0.4719, + "step": 17149 + }, + { + "epoch": 0.707683419988446, + "grad_norm": 7.602515775027371, + "learning_rate": 6.235527395574473e-07, + "loss": 0.5614, + "step": 17150 + }, + { + "epoch": 0.7077246843278039, + "grad_norm": 2.558257528688214, + "learning_rate": 6.233900532770138e-07, + "loss": 0.5228, + "step": 17151 + }, + { + "epoch": 0.7077659486671618, + "grad_norm": 2.612811051981839, + "learning_rate": 6.232273826550276e-07, + "loss": 0.468, + "step": 17152 + }, + { + "epoch": 0.7078072130065197, + "grad_norm": 4.853038285892435, + "learning_rate": 6.23064727694393e-07, + "loss": 0.5333, + "step": 17153 + }, + { + "epoch": 0.7078484773458777, + "grad_norm": 5.807052962969389, + "learning_rate": 6.229020883980162e-07, + "loss": 0.4743, + "step": 17154 + }, + { + "epoch": 
0.7078897416852357, + "grad_norm": 1.9604850631548287, + "learning_rate": 6.227394647688028e-07, + "loss": 0.4793, + "step": 17155 + }, + { + "epoch": 0.7079310060245936, + "grad_norm": 2.2563160403940574, + "learning_rate": 6.225768568096571e-07, + "loss": 0.5473, + "step": 17156 + }, + { + "epoch": 0.7079722703639515, + "grad_norm": 2.5406061600854963, + "learning_rate": 6.224142645234831e-07, + "loss": 0.5887, + "step": 17157 + }, + { + "epoch": 0.7080135347033094, + "grad_norm": 5.017068932366874, + "learning_rate": 6.222516879131858e-07, + "loss": 0.5057, + "step": 17158 + }, + { + "epoch": 0.7080547990426673, + "grad_norm": 4.399597326764595, + "learning_rate": 6.220891269816692e-07, + "loss": 0.5192, + "step": 17159 + }, + { + "epoch": 0.7080960633820252, + "grad_norm": 5.111867190377165, + "learning_rate": 6.219265817318375e-07, + "loss": 0.5693, + "step": 17160 + }, + { + "epoch": 0.7081373277213832, + "grad_norm": 2.543713364813817, + "learning_rate": 6.217640521665931e-07, + "loss": 0.5183, + "step": 17161 + }, + { + "epoch": 0.7081785920607411, + "grad_norm": 2.509631658891277, + "learning_rate": 6.216015382888398e-07, + "loss": 0.5511, + "step": 17162 + }, + { + "epoch": 0.708219856400099, + "grad_norm": 5.472219212538169, + "learning_rate": 6.214390401014803e-07, + "loss": 0.5688, + "step": 17163 + }, + { + "epoch": 0.7082611207394569, + "grad_norm": 2.716799519552623, + "learning_rate": 6.212765576074179e-07, + "loss": 0.5162, + "step": 17164 + }, + { + "epoch": 0.7083023850788149, + "grad_norm": 3.4391404514432575, + "learning_rate": 6.21114090809554e-07, + "loss": 0.4901, + "step": 17165 + }, + { + "epoch": 0.7083436494181728, + "grad_norm": 2.257790614543114, + "learning_rate": 6.209516397107911e-07, + "loss": 0.543, + "step": 17166 + }, + { + "epoch": 0.7083849137575308, + "grad_norm": 2.8010664144114465, + "learning_rate": 6.207892043140309e-07, + "loss": 0.5176, + "step": 17167 + }, + { + "epoch": 0.7084261780968887, + "grad_norm": 2.4349208186555584, + "learning_rate": 6.206267846221755e-07, + "loss": 0.5073, + "step": 17168 + }, + { + "epoch": 0.7084674424362466, + "grad_norm": 1.9710422846933435, + "learning_rate": 6.204643806381254e-07, + "loss": 0.4913, + "step": 17169 + }, + { + "epoch": 0.7085087067756045, + "grad_norm": 3.0401192519640365, + "learning_rate": 6.203019923647815e-07, + "loss": 0.4553, + "step": 17170 + }, + { + "epoch": 0.7085499711149624, + "grad_norm": 3.2795922480286777, + "learning_rate": 6.201396198050444e-07, + "loss": 0.5533, + "step": 17171 + }, + { + "epoch": 0.7085912354543203, + "grad_norm": 8.02713271992714, + "learning_rate": 6.199772629618149e-07, + "loss": 0.4913, + "step": 17172 + }, + { + "epoch": 0.7086324997936783, + "grad_norm": 3.039865131844447, + "learning_rate": 6.198149218379936e-07, + "loss": 0.4982, + "step": 17173 + }, + { + "epoch": 0.7086737641330362, + "grad_norm": 3.233195629441338, + "learning_rate": 6.19652596436479e-07, + "loss": 0.5273, + "step": 17174 + }, + { + "epoch": 0.7087150284723942, + "grad_norm": 3.4153429711294945, + "learning_rate": 6.194902867601713e-07, + "loss": 0.5156, + "step": 17175 + }, + { + "epoch": 0.7087562928117521, + "grad_norm": 3.079622568536151, + "learning_rate": 6.193279928119704e-07, + "loss": 0.5024, + "step": 17176 + }, + { + "epoch": 0.70879755715111, + "grad_norm": 2.645092625641787, + "learning_rate": 6.19165714594774e-07, + "loss": 0.4928, + "step": 17177 + }, + { + "epoch": 0.708838821490468, + "grad_norm": 3.147847477101248, + "learning_rate": 6.190034521114817e-07, + 
"loss": 0.4892, + "step": 17178 + }, + { + "epoch": 0.7088800858298259, + "grad_norm": 17.595756663442234, + "learning_rate": 6.188412053649913e-07, + "loss": 0.4764, + "step": 17179 + }, + { + "epoch": 0.7089213501691838, + "grad_norm": 2.905305597466288, + "learning_rate": 6.18678974358202e-07, + "loss": 0.4506, + "step": 17180 + }, + { + "epoch": 0.7089626145085417, + "grad_norm": 3.656550404827023, + "learning_rate": 6.18516759094011e-07, + "loss": 0.5267, + "step": 17181 + }, + { + "epoch": 0.7090038788478996, + "grad_norm": 4.130207519712002, + "learning_rate": 6.183545595753154e-07, + "loss": 0.4976, + "step": 17182 + }, + { + "epoch": 0.7090451431872575, + "grad_norm": 2.3616663368062367, + "learning_rate": 6.181923758050127e-07, + "loss": 0.5291, + "step": 17183 + }, + { + "epoch": 0.7090864075266154, + "grad_norm": 2.51138033859969, + "learning_rate": 6.180302077860001e-07, + "loss": 0.5044, + "step": 17184 + }, + { + "epoch": 0.7091276718659735, + "grad_norm": 4.441742010727727, + "learning_rate": 6.178680555211749e-07, + "loss": 0.509, + "step": 17185 + }, + { + "epoch": 0.7091689362053314, + "grad_norm": 5.9247167599082955, + "learning_rate": 6.177059190134327e-07, + "loss": 0.5237, + "step": 17186 + }, + { + "epoch": 0.7092102005446893, + "grad_norm": 2.5465011523271293, + "learning_rate": 6.175437982656695e-07, + "loss": 0.5255, + "step": 17187 + }, + { + "epoch": 0.7092514648840472, + "grad_norm": 2.221372687693395, + "learning_rate": 6.173816932807819e-07, + "loss": 0.5193, + "step": 17188 + }, + { + "epoch": 0.7092927292234051, + "grad_norm": 4.105208188557903, + "learning_rate": 6.172196040616659e-07, + "loss": 0.5423, + "step": 17189 + }, + { + "epoch": 0.709333993562763, + "grad_norm": 3.165932904557723, + "learning_rate": 6.170575306112155e-07, + "loss": 0.5082, + "step": 17190 + }, + { + "epoch": 0.709375257902121, + "grad_norm": 3.2729395699943487, + "learning_rate": 6.168954729323262e-07, + "loss": 0.4465, + "step": 17191 + }, + { + "epoch": 0.7094165222414789, + "grad_norm": 3.4868244963876744, + "learning_rate": 6.16733431027893e-07, + "loss": 0.4975, + "step": 17192 + }, + { + "epoch": 0.7094577865808368, + "grad_norm": 4.044277989947073, + "learning_rate": 6.16571404900811e-07, + "loss": 0.4609, + "step": 17193 + }, + { + "epoch": 0.7094990509201947, + "grad_norm": 2.2418934489658953, + "learning_rate": 6.164093945539731e-07, + "loss": 0.4914, + "step": 17194 + }, + { + "epoch": 0.7095403152595527, + "grad_norm": 2.521324536762026, + "learning_rate": 6.162473999902744e-07, + "loss": 0.5217, + "step": 17195 + }, + { + "epoch": 0.7095815795989107, + "grad_norm": 9.231224902793548, + "learning_rate": 6.160854212126074e-07, + "loss": 0.496, + "step": 17196 + }, + { + "epoch": 0.7096228439382686, + "grad_norm": 3.2231131038760075, + "learning_rate": 6.159234582238663e-07, + "loss": 0.5101, + "step": 17197 + }, + { + "epoch": 0.7096641082776265, + "grad_norm": 4.281364633521346, + "learning_rate": 6.157615110269436e-07, + "loss": 0.5471, + "step": 17198 + }, + { + "epoch": 0.7097053726169844, + "grad_norm": 3.1090404212350435, + "learning_rate": 6.155995796247323e-07, + "loss": 0.4823, + "step": 17199 + }, + { + "epoch": 0.7097466369563423, + "grad_norm": 7.636491081707436, + "learning_rate": 6.154376640201249e-07, + "loss": 0.4908, + "step": 17200 + }, + { + "epoch": 0.7097879012957002, + "grad_norm": 2.8644640233455565, + "learning_rate": 6.152757642160142e-07, + "loss": 0.5405, + "step": 17201 + }, + { + "epoch": 0.7098291656350582, + "grad_norm": 
2.2600673343937085, + "learning_rate": 6.15113880215291e-07, + "loss": 0.4953, + "step": 17202 + }, + { + "epoch": 0.7098704299744161, + "grad_norm": 6.170574195581512, + "learning_rate": 6.149520120208476e-07, + "loss": 0.5332, + "step": 17203 + }, + { + "epoch": 0.709911694313774, + "grad_norm": 2.1946471705098403, + "learning_rate": 6.147901596355753e-07, + "loss": 0.5391, + "step": 17204 + }, + { + "epoch": 0.709952958653132, + "grad_norm": 5.972832292467492, + "learning_rate": 6.146283230623657e-07, + "loss": 0.516, + "step": 17205 + }, + { + "epoch": 0.7099942229924899, + "grad_norm": 14.555216343147366, + "learning_rate": 6.144665023041087e-07, + "loss": 0.4725, + "step": 17206 + }, + { + "epoch": 0.7100354873318478, + "grad_norm": 2.314729509584745, + "learning_rate": 6.143046973636952e-07, + "loss": 0.5757, + "step": 17207 + }, + { + "epoch": 0.7100767516712058, + "grad_norm": 2.8291885955392053, + "learning_rate": 6.141429082440159e-07, + "loss": 0.4864, + "step": 17208 + }, + { + "epoch": 0.7101180160105637, + "grad_norm": 6.6523269540207615, + "learning_rate": 6.139811349479598e-07, + "loss": 0.5002, + "step": 17209 + }, + { + "epoch": 0.7101592803499216, + "grad_norm": 7.092485461373805, + "learning_rate": 6.138193774784179e-07, + "loss": 0.4577, + "step": 17210 + }, + { + "epoch": 0.7102005446892795, + "grad_norm": 2.9162017104571016, + "learning_rate": 6.13657635838278e-07, + "loss": 0.5033, + "step": 17211 + }, + { + "epoch": 0.7102418090286374, + "grad_norm": 2.8383646204721322, + "learning_rate": 6.1349591003043e-07, + "loss": 0.4762, + "step": 17212 + }, + { + "epoch": 0.7102830733679953, + "grad_norm": 4.70393998957163, + "learning_rate": 6.133342000577629e-07, + "loss": 0.5072, + "step": 17213 + }, + { + "epoch": 0.7103243377073533, + "grad_norm": 2.9266377895662243, + "learning_rate": 6.131725059231656e-07, + "loss": 0.5377, + "step": 17214 + }, + { + "epoch": 0.7103656020467113, + "grad_norm": 4.10327610547898, + "learning_rate": 6.130108276295253e-07, + "loss": 0.4617, + "step": 17215 + }, + { + "epoch": 0.7104068663860692, + "grad_norm": 2.2757427704895816, + "learning_rate": 6.128491651797304e-07, + "loss": 0.54, + "step": 17216 + }, + { + "epoch": 0.7104481307254271, + "grad_norm": 3.1086550404773505, + "learning_rate": 6.126875185766693e-07, + "loss": 0.5528, + "step": 17217 + }, + { + "epoch": 0.710489395064785, + "grad_norm": 2.6299969732223016, + "learning_rate": 6.125258878232284e-07, + "loss": 0.4973, + "step": 17218 + }, + { + "epoch": 0.710530659404143, + "grad_norm": 2.0659949961808826, + "learning_rate": 6.123642729222953e-07, + "loss": 0.5007, + "step": 17219 + }, + { + "epoch": 0.7105719237435009, + "grad_norm": 2.2613100667902, + "learning_rate": 6.122026738767565e-07, + "loss": 0.5293, + "step": 17220 + }, + { + "epoch": 0.7106131880828588, + "grad_norm": 2.996499589721103, + "learning_rate": 6.120410906894998e-07, + "loss": 0.4895, + "step": 17221 + }, + { + "epoch": 0.7106544524222167, + "grad_norm": 2.6936214956719624, + "learning_rate": 6.118795233634102e-07, + "loss": 0.4761, + "step": 17222 + }, + { + "epoch": 0.7106957167615746, + "grad_norm": 3.494747735338411, + "learning_rate": 6.117179719013739e-07, + "loss": 0.5524, + "step": 17223 + }, + { + "epoch": 0.7107369811009325, + "grad_norm": 2.2595042519558897, + "learning_rate": 6.115564363062764e-07, + "loss": 0.4767, + "step": 17224 + }, + { + "epoch": 0.7107782454402904, + "grad_norm": 3.8662326423328905, + "learning_rate": 6.113949165810037e-07, + "loss": 0.5273, + "step": 17225 + }, + { 
+ "epoch": 0.7108195097796485, + "grad_norm": 2.8827000579908284, + "learning_rate": 6.112334127284412e-07, + "loss": 0.4554, + "step": 17226 + }, + { + "epoch": 0.7108607741190064, + "grad_norm": 4.665773600753577, + "learning_rate": 6.110719247514729e-07, + "loss": 0.4771, + "step": 17227 + }, + { + "epoch": 0.7109020384583643, + "grad_norm": 2.3653073688481916, + "learning_rate": 6.109104526529837e-07, + "loss": 0.5458, + "step": 17228 + }, + { + "epoch": 0.7109433027977222, + "grad_norm": 2.2380034818556527, + "learning_rate": 6.10748996435858e-07, + "loss": 0.4821, + "step": 17229 + }, + { + "epoch": 0.7109845671370801, + "grad_norm": 2.8214758684152197, + "learning_rate": 6.1058755610298e-07, + "loss": 0.5265, + "step": 17230 + }, + { + "epoch": 0.711025831476438, + "grad_norm": 2.6018886728538586, + "learning_rate": 6.10426131657233e-07, + "loss": 0.4474, + "step": 17231 + }, + { + "epoch": 0.711067095815796, + "grad_norm": 3.158404017676373, + "learning_rate": 6.102647231015007e-07, + "loss": 0.5667, + "step": 17232 + }, + { + "epoch": 0.7111083601551539, + "grad_norm": 3.5793023193898144, + "learning_rate": 6.101033304386661e-07, + "loss": 0.5208, + "step": 17233 + }, + { + "epoch": 0.7111496244945118, + "grad_norm": 9.337104947511456, + "learning_rate": 6.099419536716134e-07, + "loss": 0.5412, + "step": 17234 + }, + { + "epoch": 0.7111908888338697, + "grad_norm": 2.9420105004113397, + "learning_rate": 6.097805928032229e-07, + "loss": 0.5165, + "step": 17235 + }, + { + "epoch": 0.7112321531732277, + "grad_norm": 3.2555890374034635, + "learning_rate": 6.096192478363777e-07, + "loss": 0.5466, + "step": 17236 + }, + { + "epoch": 0.7112734175125857, + "grad_norm": 3.694466591019012, + "learning_rate": 6.094579187739604e-07, + "loss": 0.5007, + "step": 17237 + }, + { + "epoch": 0.7113146818519436, + "grad_norm": 2.6309162192282263, + "learning_rate": 6.092966056188531e-07, + "loss": 0.5473, + "step": 17238 + }, + { + "epoch": 0.7113559461913015, + "grad_norm": 2.2963689695125367, + "learning_rate": 6.091353083739358e-07, + "loss": 0.5327, + "step": 17239 + }, + { + "epoch": 0.7113972105306594, + "grad_norm": 21.98232062151985, + "learning_rate": 6.089740270420907e-07, + "loss": 0.5365, + "step": 17240 + }, + { + "epoch": 0.7114384748700173, + "grad_norm": 2.227174581838854, + "learning_rate": 6.088127616261983e-07, + "loss": 0.5116, + "step": 17241 + }, + { + "epoch": 0.7114797392093752, + "grad_norm": 3.0558105162597164, + "learning_rate": 6.086515121291402e-07, + "loss": 0.5243, + "step": 17242 + }, + { + "epoch": 0.7115210035487332, + "grad_norm": 2.673858026415521, + "learning_rate": 6.084902785537951e-07, + "loss": 0.5346, + "step": 17243 + }, + { + "epoch": 0.7115622678880911, + "grad_norm": 3.282005568954562, + "learning_rate": 6.08329060903044e-07, + "loss": 0.5022, + "step": 17244 + }, + { + "epoch": 0.711603532227449, + "grad_norm": 4.072695136322365, + "learning_rate": 6.081678591797665e-07, + "loss": 0.5166, + "step": 17245 + }, + { + "epoch": 0.711644796566807, + "grad_norm": 3.3665330285787434, + "learning_rate": 6.080066733868425e-07, + "loss": 0.5261, + "step": 17246 + }, + { + "epoch": 0.7116860609061649, + "grad_norm": 3.5910458806259298, + "learning_rate": 6.078455035271508e-07, + "loss": 0.5515, + "step": 17247 + }, + { + "epoch": 0.7117273252455228, + "grad_norm": 5.40427193175009, + "learning_rate": 6.076843496035697e-07, + "loss": 0.4795, + "step": 17248 + }, + { + "epoch": 0.7117685895848808, + "grad_norm": 16.029610726096024, + "learning_rate": 
6.075232116189783e-07, + "loss": 0.4646, + "step": 17249 + }, + { + "epoch": 0.7118098539242387, + "grad_norm": 2.557445170170648, + "learning_rate": 6.07362089576255e-07, + "loss": 0.5108, + "step": 17250 + }, + { + "epoch": 0.7118511182635966, + "grad_norm": 5.983565525393309, + "learning_rate": 6.072009834782783e-07, + "loss": 0.5107, + "step": 17251 + }, + { + "epoch": 0.7118923826029545, + "grad_norm": 3.417373983345073, + "learning_rate": 6.07039893327925e-07, + "loss": 0.5794, + "step": 17252 + }, + { + "epoch": 0.7119336469423124, + "grad_norm": 3.2365778360207598, + "learning_rate": 6.068788191280731e-07, + "loss": 0.5384, + "step": 17253 + }, + { + "epoch": 0.7119749112816703, + "grad_norm": 2.7347034729976833, + "learning_rate": 6.067177608816003e-07, + "loss": 0.4473, + "step": 17254 + }, + { + "epoch": 0.7120161756210283, + "grad_norm": 3.6086669346162763, + "learning_rate": 6.065567185913823e-07, + "loss": 0.5489, + "step": 17255 + }, + { + "epoch": 0.7120574399603863, + "grad_norm": 3.3723161498277645, + "learning_rate": 6.063956922602963e-07, + "loss": 0.5755, + "step": 17256 + }, + { + "epoch": 0.7120987042997442, + "grad_norm": 4.0440500779141235, + "learning_rate": 6.062346818912188e-07, + "loss": 0.5517, + "step": 17257 + }, + { + "epoch": 0.7121399686391021, + "grad_norm": 2.8191507446733812, + "learning_rate": 6.060736874870262e-07, + "loss": 0.5231, + "step": 17258 + }, + { + "epoch": 0.71218123297846, + "grad_norm": 9.111684675297985, + "learning_rate": 6.059127090505933e-07, + "loss": 0.5151, + "step": 17259 + }, + { + "epoch": 0.712222497317818, + "grad_norm": 10.016281443399428, + "learning_rate": 6.057517465847967e-07, + "loss": 0.5252, + "step": 17260 + }, + { + "epoch": 0.7122637616571759, + "grad_norm": 3.1486389229141696, + "learning_rate": 6.055908000925102e-07, + "loss": 0.5156, + "step": 17261 + }, + { + "epoch": 0.7123050259965338, + "grad_norm": 3.3266692347823223, + "learning_rate": 6.054298695766097e-07, + "loss": 0.4786, + "step": 17262 + }, + { + "epoch": 0.7123462903358917, + "grad_norm": 2.5541155196508445, + "learning_rate": 6.052689550399701e-07, + "loss": 0.5525, + "step": 17263 + }, + { + "epoch": 0.7123875546752496, + "grad_norm": 2.747692333891508, + "learning_rate": 6.051080564854648e-07, + "loss": 0.4977, + "step": 17264 + }, + { + "epoch": 0.7124288190146075, + "grad_norm": 2.7925951272529907, + "learning_rate": 6.04947173915968e-07, + "loss": 0.4959, + "step": 17265 + }, + { + "epoch": 0.7124700833539656, + "grad_norm": 31.501916422169987, + "learning_rate": 6.04786307334354e-07, + "loss": 0.5259, + "step": 17266 + }, + { + "epoch": 0.7125113476933235, + "grad_norm": 2.4740736025306465, + "learning_rate": 6.046254567434965e-07, + "loss": 0.5737, + "step": 17267 + }, + { + "epoch": 0.7125526120326814, + "grad_norm": 4.026421133196268, + "learning_rate": 6.044646221462676e-07, + "loss": 0.5012, + "step": 17268 + }, + { + "epoch": 0.7125938763720393, + "grad_norm": 13.152154978509165, + "learning_rate": 6.043038035455409e-07, + "loss": 0.527, + "step": 17269 + }, + { + "epoch": 0.7126351407113972, + "grad_norm": 7.65752328855284, + "learning_rate": 6.04143000944189e-07, + "loss": 0.5405, + "step": 17270 + }, + { + "epoch": 0.7126764050507551, + "grad_norm": 3.5963707224655503, + "learning_rate": 6.039822143450847e-07, + "loss": 0.5285, + "step": 17271 + }, + { + "epoch": 0.712717669390113, + "grad_norm": 2.177553913118344, + "learning_rate": 6.038214437510991e-07, + "loss": 0.5381, + "step": 17272 + }, + { + "epoch": 0.712758933729471, + 
"grad_norm": 2.533823049437737, + "learning_rate": 6.036606891651049e-07, + "loss": 0.5471, + "step": 17273 + }, + { + "epoch": 0.7128001980688289, + "grad_norm": 3.156106723311507, + "learning_rate": 6.034999505899725e-07, + "loss": 0.511, + "step": 17274 + }, + { + "epoch": 0.7128414624081868, + "grad_norm": 3.3116545830920914, + "learning_rate": 6.033392280285741e-07, + "loss": 0.5041, + "step": 17275 + }, + { + "epoch": 0.7128827267475448, + "grad_norm": 2.056063397138903, + "learning_rate": 6.031785214837799e-07, + "loss": 0.4958, + "step": 17276 + }, + { + "epoch": 0.7129239910869027, + "grad_norm": 6.198593893248641, + "learning_rate": 6.030178309584606e-07, + "loss": 0.4938, + "step": 17277 + }, + { + "epoch": 0.7129652554262607, + "grad_norm": 2.4435577720003328, + "learning_rate": 6.02857156455487e-07, + "loss": 0.4842, + "step": 17278 + }, + { + "epoch": 0.7130065197656186, + "grad_norm": 2.722074441028904, + "learning_rate": 6.026964979777292e-07, + "loss": 0.5372, + "step": 17279 + }, + { + "epoch": 0.7130477841049765, + "grad_norm": 5.932253118804353, + "learning_rate": 6.025358555280562e-07, + "loss": 0.4841, + "step": 17280 + }, + { + "epoch": 0.7130890484443344, + "grad_norm": 2.8235103960296484, + "learning_rate": 6.02375229109338e-07, + "loss": 0.5229, + "step": 17281 + }, + { + "epoch": 0.7131303127836923, + "grad_norm": 2.9448082308674635, + "learning_rate": 6.022146187244436e-07, + "loss": 0.5465, + "step": 17282 + }, + { + "epoch": 0.7131715771230502, + "grad_norm": 3.2736634041899486, + "learning_rate": 6.020540243762426e-07, + "loss": 0.5237, + "step": 17283 + }, + { + "epoch": 0.7132128414624082, + "grad_norm": 1.8674389122588402, + "learning_rate": 6.018934460676024e-07, + "loss": 0.4702, + "step": 17284 + }, + { + "epoch": 0.7132541058017661, + "grad_norm": 3.1598233098780066, + "learning_rate": 6.017328838013921e-07, + "loss": 0.4861, + "step": 17285 + }, + { + "epoch": 0.713295370141124, + "grad_norm": 2.5905420248451274, + "learning_rate": 6.015723375804797e-07, + "loss": 0.4828, + "step": 17286 + }, + { + "epoch": 0.713336634480482, + "grad_norm": 2.219576507213129, + "learning_rate": 6.014118074077333e-07, + "loss": 0.5551, + "step": 17287 + }, + { + "epoch": 0.7133778988198399, + "grad_norm": 2.4923454702316294, + "learning_rate": 6.012512932860198e-07, + "loss": 0.5172, + "step": 17288 + }, + { + "epoch": 0.7134191631591978, + "grad_norm": 2.6801010977296453, + "learning_rate": 6.010907952182062e-07, + "loss": 0.5019, + "step": 17289 + }, + { + "epoch": 0.7134604274985558, + "grad_norm": 2.2780577278849585, + "learning_rate": 6.009303132071597e-07, + "loss": 0.5391, + "step": 17290 + }, + { + "epoch": 0.7135016918379137, + "grad_norm": 3.884494389589871, + "learning_rate": 6.007698472557468e-07, + "loss": 0.5381, + "step": 17291 + }, + { + "epoch": 0.7135429561772716, + "grad_norm": 4.939859513104654, + "learning_rate": 6.006093973668346e-07, + "loss": 0.5101, + "step": 17292 + }, + { + "epoch": 0.7135842205166295, + "grad_norm": 4.320183095468127, + "learning_rate": 6.00448963543288e-07, + "loss": 0.4755, + "step": 17293 + }, + { + "epoch": 0.7136254848559874, + "grad_norm": 2.59827658405829, + "learning_rate": 6.002885457879732e-07, + "loss": 0.5535, + "step": 17294 + }, + { + "epoch": 0.7136667491953453, + "grad_norm": 3.499937604293675, + "learning_rate": 6.001281441037562e-07, + "loss": 0.4846, + "step": 17295 + }, + { + "epoch": 0.7137080135347033, + "grad_norm": 4.6898817262198245, + "learning_rate": 5.999677584935013e-07, + "loss": 0.5214, + 
"step": 17296 + }, + { + "epoch": 0.7137492778740613, + "grad_norm": 3.1528401411458913, + "learning_rate": 5.998073889600735e-07, + "loss": 0.5374, + "step": 17297 + }, + { + "epoch": 0.7137905422134192, + "grad_norm": 7.534063289147201, + "learning_rate": 5.996470355063379e-07, + "loss": 0.5067, + "step": 17298 + }, + { + "epoch": 0.7138318065527771, + "grad_norm": 6.421569297357762, + "learning_rate": 5.99486698135159e-07, + "loss": 0.5122, + "step": 17299 + }, + { + "epoch": 0.713873070892135, + "grad_norm": 3.2233392329473443, + "learning_rate": 5.993263768494005e-07, + "loss": 0.4648, + "step": 17300 + }, + { + "epoch": 0.713914335231493, + "grad_norm": 7.000974291396921, + "learning_rate": 5.991660716519254e-07, + "loss": 0.5422, + "step": 17301 + }, + { + "epoch": 0.7139555995708509, + "grad_norm": 3.5274375161675335, + "learning_rate": 5.990057825455979e-07, + "loss": 0.4775, + "step": 17302 + }, + { + "epoch": 0.7139968639102088, + "grad_norm": 2.6106473924228792, + "learning_rate": 5.98845509533281e-07, + "loss": 0.5475, + "step": 17303 + }, + { + "epoch": 0.7140381282495667, + "grad_norm": 2.693814830362419, + "learning_rate": 5.986852526178383e-07, + "loss": 0.5226, + "step": 17304 + }, + { + "epoch": 0.7140793925889246, + "grad_norm": 3.7084714641326055, + "learning_rate": 5.985250118021309e-07, + "loss": 0.5114, + "step": 17305 + }, + { + "epoch": 0.7141206569282825, + "grad_norm": 3.6984563381973476, + "learning_rate": 5.983647870890223e-07, + "loss": 0.5615, + "step": 17306 + }, + { + "epoch": 0.7141619212676406, + "grad_norm": 2.768289433019307, + "learning_rate": 5.982045784813739e-07, + "loss": 0.5317, + "step": 17307 + }, + { + "epoch": 0.7142031856069985, + "grad_norm": 2.4962434968713336, + "learning_rate": 5.980443859820482e-07, + "loss": 0.5131, + "step": 17308 + }, + { + "epoch": 0.7142444499463564, + "grad_norm": 8.130635787751958, + "learning_rate": 5.978842095939058e-07, + "loss": 0.5175, + "step": 17309 + }, + { + "epoch": 0.7142857142857143, + "grad_norm": 25.73425517514425, + "learning_rate": 5.977240493198081e-07, + "loss": 0.5589, + "step": 17310 + }, + { + "epoch": 0.7143269786250722, + "grad_norm": 3.310343759834051, + "learning_rate": 5.975639051626159e-07, + "loss": 0.5543, + "step": 17311 + }, + { + "epoch": 0.7143682429644301, + "grad_norm": 4.039313368050676, + "learning_rate": 5.974037771251904e-07, + "loss": 0.5096, + "step": 17312 + }, + { + "epoch": 0.714409507303788, + "grad_norm": 3.366795380226177, + "learning_rate": 5.972436652103916e-07, + "loss": 0.503, + "step": 17313 + }, + { + "epoch": 0.714450771643146, + "grad_norm": 7.966082302562425, + "learning_rate": 5.970835694210786e-07, + "loss": 0.5226, + "step": 17314 + }, + { + "epoch": 0.7144920359825039, + "grad_norm": 2.108316153319609, + "learning_rate": 5.969234897601119e-07, + "loss": 0.5067, + "step": 17315 + }, + { + "epoch": 0.7145333003218618, + "grad_norm": 2.372814754575412, + "learning_rate": 5.967634262303513e-07, + "loss": 0.5445, + "step": 17316 + }, + { + "epoch": 0.7145745646612198, + "grad_norm": 2.71704365242767, + "learning_rate": 5.966033788346549e-07, + "loss": 0.4567, + "step": 17317 + }, + { + "epoch": 0.7146158290005777, + "grad_norm": 2.913292616346512, + "learning_rate": 5.964433475758822e-07, + "loss": 0.5235, + "step": 17318 + }, + { + "epoch": 0.7146570933399357, + "grad_norm": 6.176942180958027, + "learning_rate": 5.962833324568916e-07, + "loss": 0.5084, + "step": 17319 + }, + { + "epoch": 0.7146983576792936, + "grad_norm": 3.5395016918915383, + 
"learning_rate": 5.961233334805421e-07, + "loss": 0.5026, + "step": 17320 + }, + { + "epoch": 0.7147396220186515, + "grad_norm": 2.2388156515334456, + "learning_rate": 5.959633506496903e-07, + "loss": 0.48, + "step": 17321 + }, + { + "epoch": 0.7147808863580094, + "grad_norm": 4.015075101288265, + "learning_rate": 5.958033839671946e-07, + "loss": 0.5666, + "step": 17322 + }, + { + "epoch": 0.7148221506973673, + "grad_norm": 5.6140153826782395, + "learning_rate": 5.956434334359126e-07, + "loss": 0.5209, + "step": 17323 + }, + { + "epoch": 0.7148634150367252, + "grad_norm": 2.7729953171841975, + "learning_rate": 5.954834990587015e-07, + "loss": 0.4754, + "step": 17324 + }, + { + "epoch": 0.7149046793760832, + "grad_norm": 2.841590384177163, + "learning_rate": 5.953235808384175e-07, + "loss": 0.5253, + "step": 17325 + }, + { + "epoch": 0.7149459437154411, + "grad_norm": 5.770052131999929, + "learning_rate": 5.951636787779179e-07, + "loss": 0.5211, + "step": 17326 + }, + { + "epoch": 0.7149872080547991, + "grad_norm": 3.3897608894832585, + "learning_rate": 5.950037928800582e-07, + "loss": 0.5416, + "step": 17327 + }, + { + "epoch": 0.715028472394157, + "grad_norm": 26.01177171802491, + "learning_rate": 5.948439231476948e-07, + "loss": 0.5045, + "step": 17328 + }, + { + "epoch": 0.7150697367335149, + "grad_norm": 3.7103253210970264, + "learning_rate": 5.946840695836835e-07, + "loss": 0.48, + "step": 17329 + }, + { + "epoch": 0.7151110010728728, + "grad_norm": 5.772311833377586, + "learning_rate": 5.945242321908792e-07, + "loss": 0.5261, + "step": 17330 + }, + { + "epoch": 0.7151522654122308, + "grad_norm": 7.393791637897107, + "learning_rate": 5.943644109721371e-07, + "loss": 0.5094, + "step": 17331 + }, + { + "epoch": 0.7151935297515887, + "grad_norm": 3.4300639989797745, + "learning_rate": 5.942046059303128e-07, + "loss": 0.565, + "step": 17332 + }, + { + "epoch": 0.7152347940909466, + "grad_norm": 2.3919461677333396, + "learning_rate": 5.940448170682596e-07, + "loss": 0.4947, + "step": 17333 + }, + { + "epoch": 0.7152760584303045, + "grad_norm": 3.5975316326257194, + "learning_rate": 5.938850443888324e-07, + "loss": 0.4841, + "step": 17334 + }, + { + "epoch": 0.7153173227696624, + "grad_norm": 2.678406070149769, + "learning_rate": 5.93725287894885e-07, + "loss": 0.5217, + "step": 17335 + }, + { + "epoch": 0.7153585871090203, + "grad_norm": 5.1966563382695234, + "learning_rate": 5.935655475892716e-07, + "loss": 0.4905, + "step": 17336 + }, + { + "epoch": 0.7153998514483784, + "grad_norm": 2.9007223358850025, + "learning_rate": 5.934058234748446e-07, + "loss": 0.4809, + "step": 17337 + }, + { + "epoch": 0.7154411157877363, + "grad_norm": 3.84759332906205, + "learning_rate": 5.932461155544576e-07, + "loss": 0.4921, + "step": 17338 + }, + { + "epoch": 0.7154823801270942, + "grad_norm": 12.096962253960202, + "learning_rate": 5.930864238309636e-07, + "loss": 0.5453, + "step": 17339 + }, + { + "epoch": 0.7155236444664521, + "grad_norm": 2.0687357090264595, + "learning_rate": 5.929267483072146e-07, + "loss": 0.5181, + "step": 17340 + }, + { + "epoch": 0.71556490880581, + "grad_norm": 2.010226988457638, + "learning_rate": 5.927670889860635e-07, + "loss": 0.5237, + "step": 17341 + }, + { + "epoch": 0.715606173145168, + "grad_norm": 3.3237644773805295, + "learning_rate": 5.926074458703611e-07, + "loss": 0.5168, + "step": 17342 + }, + { + "epoch": 0.7156474374845259, + "grad_norm": 4.399786573792847, + "learning_rate": 5.924478189629597e-07, + "loss": 0.5258, + "step": 17343 + }, + { + "epoch": 
0.7156887018238838, + "grad_norm": 3.007107142328998, + "learning_rate": 5.922882082667106e-07, + "loss": 0.518, + "step": 17344 + }, + { + "epoch": 0.7157299661632417, + "grad_norm": 3.075451027538908, + "learning_rate": 5.921286137844654e-07, + "loss": 0.5021, + "step": 17345 + }, + { + "epoch": 0.7157712305025996, + "grad_norm": 10.052059239650003, + "learning_rate": 5.919690355190738e-07, + "loss": 0.522, + "step": 17346 + }, + { + "epoch": 0.7158124948419575, + "grad_norm": 3.300368604186498, + "learning_rate": 5.918094734733868e-07, + "loss": 0.4928, + "step": 17347 + }, + { + "epoch": 0.7158537591813156, + "grad_norm": 2.2511797296660845, + "learning_rate": 5.916499276502544e-07, + "loss": 0.5184, + "step": 17348 + }, + { + "epoch": 0.7158950235206735, + "grad_norm": 10.482343255861986, + "learning_rate": 5.914903980525273e-07, + "loss": 0.5829, + "step": 17349 + }, + { + "epoch": 0.7159362878600314, + "grad_norm": 2.6840529364607812, + "learning_rate": 5.913308846830539e-07, + "loss": 0.4836, + "step": 17350 + }, + { + "epoch": 0.7159775521993893, + "grad_norm": 2.1379318171367414, + "learning_rate": 5.911713875446839e-07, + "loss": 0.5069, + "step": 17351 + }, + { + "epoch": 0.7160188165387472, + "grad_norm": 5.292182510408952, + "learning_rate": 5.910119066402672e-07, + "loss": 0.4902, + "step": 17352 + }, + { + "epoch": 0.7160600808781051, + "grad_norm": 2.9877247657911163, + "learning_rate": 5.908524419726516e-07, + "loss": 0.5415, + "step": 17353 + }, + { + "epoch": 0.716101345217463, + "grad_norm": 2.2586680764905966, + "learning_rate": 5.906929935446853e-07, + "loss": 0.5242, + "step": 17354 + }, + { + "epoch": 0.716142609556821, + "grad_norm": 3.283036252463995, + "learning_rate": 5.905335613592168e-07, + "loss": 0.5324, + "step": 17355 + }, + { + "epoch": 0.7161838738961789, + "grad_norm": 2.80601168647409, + "learning_rate": 5.90374145419094e-07, + "loss": 0.4717, + "step": 17356 + }, + { + "epoch": 0.7162251382355368, + "grad_norm": 5.14860958663719, + "learning_rate": 5.902147457271651e-07, + "loss": 0.4946, + "step": 17357 + }, + { + "epoch": 0.7162664025748948, + "grad_norm": 7.690332749925679, + "learning_rate": 5.900553622862763e-07, + "loss": 0.5459, + "step": 17358 + }, + { + "epoch": 0.7163076669142527, + "grad_norm": 3.1279663927032924, + "learning_rate": 5.898959950992752e-07, + "loss": 0.5091, + "step": 17359 + }, + { + "epoch": 0.7163489312536107, + "grad_norm": 5.841072776245593, + "learning_rate": 5.89736644169008e-07, + "loss": 0.537, + "step": 17360 + }, + { + "epoch": 0.7163901955929686, + "grad_norm": 2.726772645298027, + "learning_rate": 5.895773094983221e-07, + "loss": 0.4714, + "step": 17361 + }, + { + "epoch": 0.7164314599323265, + "grad_norm": 6.978002328690308, + "learning_rate": 5.894179910900626e-07, + "loss": 0.5013, + "step": 17362 + }, + { + "epoch": 0.7164727242716844, + "grad_norm": 3.424939697299977, + "learning_rate": 5.892586889470756e-07, + "loss": 0.536, + "step": 17363 + }, + { + "epoch": 0.7165139886110423, + "grad_norm": 2.784962278679622, + "learning_rate": 5.890994030722068e-07, + "loss": 0.5266, + "step": 17364 + }, + { + "epoch": 0.7165552529504002, + "grad_norm": 4.0579834357498905, + "learning_rate": 5.889401334683017e-07, + "loss": 0.4998, + "step": 17365 + }, + { + "epoch": 0.7165965172897582, + "grad_norm": 4.697440453220946, + "learning_rate": 5.887808801382053e-07, + "loss": 0.5277, + "step": 17366 + }, + { + "epoch": 0.7166377816291161, + "grad_norm": 4.38621322725192, + "learning_rate": 5.886216430847612e-07, + "loss": 
0.4915, + "step": 17367 + }, + { + "epoch": 0.7166790459684741, + "grad_norm": 3.6664432586592604, + "learning_rate": 5.884624223108143e-07, + "loss": 0.4856, + "step": 17368 + }, + { + "epoch": 0.716720310307832, + "grad_norm": 2.4264917558458494, + "learning_rate": 5.88303217819209e-07, + "loss": 0.4967, + "step": 17369 + }, + { + "epoch": 0.7167615746471899, + "grad_norm": 3.2748739403704934, + "learning_rate": 5.881440296127896e-07, + "loss": 0.543, + "step": 17370 + }, + { + "epoch": 0.7168028389865478, + "grad_norm": 6.1217922640434415, + "learning_rate": 5.879848576943983e-07, + "loss": 0.5021, + "step": 17371 + }, + { + "epoch": 0.7168441033259058, + "grad_norm": 2.8017479368520357, + "learning_rate": 5.87825702066879e-07, + "loss": 0.551, + "step": 17372 + }, + { + "epoch": 0.7168853676652637, + "grad_norm": 2.246515789964379, + "learning_rate": 5.876665627330751e-07, + "loss": 0.5134, + "step": 17373 + }, + { + "epoch": 0.7169266320046216, + "grad_norm": 3.708123632859329, + "learning_rate": 5.875074396958281e-07, + "loss": 0.5114, + "step": 17374 + }, + { + "epoch": 0.7169678963439795, + "grad_norm": 1.8088632893764085, + "learning_rate": 5.87348332957981e-07, + "loss": 0.4773, + "step": 17375 + }, + { + "epoch": 0.7170091606833374, + "grad_norm": 2.9672171699305543, + "learning_rate": 5.871892425223756e-07, + "loss": 0.493, + "step": 17376 + }, + { + "epoch": 0.7170504250226953, + "grad_norm": 2.446240553201975, + "learning_rate": 5.870301683918546e-07, + "loss": 0.532, + "step": 17377 + }, + { + "epoch": 0.7170916893620534, + "grad_norm": 3.834925752319987, + "learning_rate": 5.86871110569258e-07, + "loss": 0.472, + "step": 17378 + }, + { + "epoch": 0.7171329537014113, + "grad_norm": 12.786624892619969, + "learning_rate": 5.867120690574283e-07, + "loss": 0.5542, + "step": 17379 + }, + { + "epoch": 0.7171742180407692, + "grad_norm": 5.719359047900923, + "learning_rate": 5.865530438592052e-07, + "loss": 0.5245, + "step": 17380 + }, + { + "epoch": 0.7172154823801271, + "grad_norm": 5.493445570440868, + "learning_rate": 5.863940349774298e-07, + "loss": 0.5258, + "step": 17381 + }, + { + "epoch": 0.717256746719485, + "grad_norm": 3.1894579540567958, + "learning_rate": 5.862350424149431e-07, + "loss": 0.5307, + "step": 17382 + }, + { + "epoch": 0.7172980110588429, + "grad_norm": 2.647151877243772, + "learning_rate": 5.860760661745837e-07, + "loss": 0.5407, + "step": 17383 + }, + { + "epoch": 0.7173392753982009, + "grad_norm": 1.9219944864688716, + "learning_rate": 5.859171062591921e-07, + "loss": 0.5072, + "step": 17384 + }, + { + "epoch": 0.7173805397375588, + "grad_norm": 11.901201891629363, + "learning_rate": 5.857581626716076e-07, + "loss": 0.45, + "step": 17385 + }, + { + "epoch": 0.7174218040769167, + "grad_norm": 3.1558600222227557, + "learning_rate": 5.855992354146701e-07, + "loss": 0.4902, + "step": 17386 + }, + { + "epoch": 0.7174630684162746, + "grad_norm": 3.6569839371580577, + "learning_rate": 5.85440324491217e-07, + "loss": 0.5184, + "step": 17387 + }, + { + "epoch": 0.7175043327556326, + "grad_norm": 18.70567633436369, + "learning_rate": 5.852814299040877e-07, + "loss": 0.4906, + "step": 17388 + }, + { + "epoch": 0.7175455970949906, + "grad_norm": 3.0367666790393386, + "learning_rate": 5.851225516561204e-07, + "loss": 0.495, + "step": 17389 + }, + { + "epoch": 0.7175868614343485, + "grad_norm": 3.7220076209718354, + "learning_rate": 5.849636897501533e-07, + "loss": 0.4616, + "step": 17390 + }, + { + "epoch": 0.7176281257737064, + "grad_norm": 2.3897974073208332, + 
"learning_rate": 5.848048441890234e-07, + "loss": 0.4776, + "step": 17391 + }, + { + "epoch": 0.7176693901130643, + "grad_norm": 3.3449192330178303, + "learning_rate": 5.846460149755692e-07, + "loss": 0.5408, + "step": 17392 + }, + { + "epoch": 0.7177106544524222, + "grad_norm": 7.700049578345468, + "learning_rate": 5.844872021126262e-07, + "loss": 0.5133, + "step": 17393 + }, + { + "epoch": 0.7177519187917801, + "grad_norm": 4.711408780652101, + "learning_rate": 5.843284056030327e-07, + "loss": 0.5241, + "step": 17394 + }, + { + "epoch": 0.717793183131138, + "grad_norm": 4.1884747022920745, + "learning_rate": 5.841696254496239e-07, + "loss": 0.5324, + "step": 17395 + }, + { + "epoch": 0.717834447470496, + "grad_norm": 5.449205923230113, + "learning_rate": 5.840108616552367e-07, + "loss": 0.5863, + "step": 17396 + }, + { + "epoch": 0.7178757118098539, + "grad_norm": 2.4037428330823007, + "learning_rate": 5.838521142227071e-07, + "loss": 0.5069, + "step": 17397 + }, + { + "epoch": 0.7179169761492119, + "grad_norm": 2.2334583629770055, + "learning_rate": 5.836933831548708e-07, + "loss": 0.4943, + "step": 17398 + }, + { + "epoch": 0.7179582404885698, + "grad_norm": 3.813009761342667, + "learning_rate": 5.835346684545626e-07, + "loss": 0.5577, + "step": 17399 + }, + { + "epoch": 0.7179995048279277, + "grad_norm": 2.738415810232648, + "learning_rate": 5.833759701246179e-07, + "loss": 0.491, + "step": 17400 + }, + { + "epoch": 0.7180407691672857, + "grad_norm": 4.6543164182414465, + "learning_rate": 5.832172881678714e-07, + "loss": 0.5142, + "step": 17401 + }, + { + "epoch": 0.7180820335066436, + "grad_norm": 2.563492774406037, + "learning_rate": 5.830586225871581e-07, + "loss": 0.4731, + "step": 17402 + }, + { + "epoch": 0.7181232978460015, + "grad_norm": 2.3040783350657783, + "learning_rate": 5.82899973385311e-07, + "loss": 0.5229, + "step": 17403 + }, + { + "epoch": 0.7181645621853594, + "grad_norm": 3.5025328407579415, + "learning_rate": 5.827413405651647e-07, + "loss": 0.4969, + "step": 17404 + }, + { + "epoch": 0.7182058265247173, + "grad_norm": 2.9670031906793413, + "learning_rate": 5.825827241295533e-07, + "loss": 0.5157, + "step": 17405 + }, + { + "epoch": 0.7182470908640752, + "grad_norm": 2.7549950889987973, + "learning_rate": 5.824241240813088e-07, + "loss": 0.5025, + "step": 17406 + }, + { + "epoch": 0.7182883552034331, + "grad_norm": 3.967445720789979, + "learning_rate": 5.822655404232654e-07, + "loss": 0.5604, + "step": 17407 + }, + { + "epoch": 0.7183296195427911, + "grad_norm": 7.344393423377536, + "learning_rate": 5.821069731582549e-07, + "loss": 0.4506, + "step": 17408 + }, + { + "epoch": 0.7183708838821491, + "grad_norm": 2.348560991792155, + "learning_rate": 5.819484222891098e-07, + "loss": 0.5372, + "step": 17409 + }, + { + "epoch": 0.718412148221507, + "grad_norm": 2.9230263311180136, + "learning_rate": 5.817898878186633e-07, + "loss": 0.509, + "step": 17410 + }, + { + "epoch": 0.7184534125608649, + "grad_norm": 2.2597640971740303, + "learning_rate": 5.816313697497458e-07, + "loss": 0.4935, + "step": 17411 + }, + { + "epoch": 0.7184946769002228, + "grad_norm": 3.655452103797237, + "learning_rate": 5.814728680851893e-07, + "loss": 0.5174, + "step": 17412 + }, + { + "epoch": 0.7185359412395808, + "grad_norm": 5.261597317885288, + "learning_rate": 5.813143828278253e-07, + "loss": 0.5244, + "step": 17413 + }, + { + "epoch": 0.7185772055789387, + "grad_norm": 2.086876598694403, + "learning_rate": 5.811559139804851e-07, + "loss": 0.5091, + "step": 17414 + }, + { + "epoch": 
0.7186184699182966, + "grad_norm": 2.2712886351390047, + "learning_rate": 5.809974615459983e-07, + "loss": 0.4829, + "step": 17415 + }, + { + "epoch": 0.7186597342576545, + "grad_norm": 2.708517229421976, + "learning_rate": 5.80839025527196e-07, + "loss": 0.5199, + "step": 17416 + }, + { + "epoch": 0.7187009985970124, + "grad_norm": 2.5643779339641495, + "learning_rate": 5.80680605926908e-07, + "loss": 0.4975, + "step": 17417 + }, + { + "epoch": 0.7187422629363703, + "grad_norm": 5.7033164086143815, + "learning_rate": 5.805222027479647e-07, + "loss": 0.499, + "step": 17418 + }, + { + "epoch": 0.7187835272757284, + "grad_norm": 3.180497510207016, + "learning_rate": 5.803638159931951e-07, + "loss": 0.5253, + "step": 17419 + }, + { + "epoch": 0.7188247916150863, + "grad_norm": 7.876342510308515, + "learning_rate": 5.802054456654276e-07, + "loss": 0.5142, + "step": 17420 + }, + { + "epoch": 0.7188660559544442, + "grad_norm": 2.192429735414423, + "learning_rate": 5.800470917674919e-07, + "loss": 0.5105, + "step": 17421 + }, + { + "epoch": 0.7189073202938021, + "grad_norm": 7.326528623331678, + "learning_rate": 5.798887543022166e-07, + "loss": 0.5275, + "step": 17422 + }, + { + "epoch": 0.71894858463316, + "grad_norm": 2.429206373770165, + "learning_rate": 5.797304332724305e-07, + "loss": 0.5002, + "step": 17423 + }, + { + "epoch": 0.7189898489725179, + "grad_norm": 2.3314991633176443, + "learning_rate": 5.795721286809603e-07, + "loss": 0.5097, + "step": 17424 + }, + { + "epoch": 0.7190311133118759, + "grad_norm": 5.005052795608614, + "learning_rate": 5.794138405306345e-07, + "loss": 0.4846, + "step": 17425 + }, + { + "epoch": 0.7190723776512338, + "grad_norm": 2.6363940982898537, + "learning_rate": 5.792555688242806e-07, + "loss": 0.4863, + "step": 17426 + }, + { + "epoch": 0.7191136419905917, + "grad_norm": 3.9302484953902876, + "learning_rate": 5.79097313564726e-07, + "loss": 0.5051, + "step": 17427 + }, + { + "epoch": 0.7191549063299496, + "grad_norm": 2.9041159696433025, + "learning_rate": 5.789390747547965e-07, + "loss": 0.5016, + "step": 17428 + }, + { + "epoch": 0.7191961706693076, + "grad_norm": 30.059418328572757, + "learning_rate": 5.787808523973195e-07, + "loss": 0.4546, + "step": 17429 + }, + { + "epoch": 0.7192374350086655, + "grad_norm": 5.826030290777784, + "learning_rate": 5.786226464951214e-07, + "loss": 0.5046, + "step": 17430 + }, + { + "epoch": 0.7192786993480235, + "grad_norm": 2.9042428019881634, + "learning_rate": 5.784644570510278e-07, + "loss": 0.5111, + "step": 17431 + }, + { + "epoch": 0.7193199636873814, + "grad_norm": 3.8675638151020983, + "learning_rate": 5.783062840678637e-07, + "loss": 0.4545, + "step": 17432 + }, + { + "epoch": 0.7193612280267393, + "grad_norm": 2.3521944953596674, + "learning_rate": 5.781481275484551e-07, + "loss": 0.507, + "step": 17433 + }, + { + "epoch": 0.7194024923660972, + "grad_norm": 4.304134141400275, + "learning_rate": 5.779899874956273e-07, + "loss": 0.5121, + "step": 17434 + }, + { + "epoch": 0.7194437567054551, + "grad_norm": 50.42412606849207, + "learning_rate": 5.778318639122051e-07, + "loss": 0.5393, + "step": 17435 + }, + { + "epoch": 0.719485021044813, + "grad_norm": 7.215009286377722, + "learning_rate": 5.776737568010122e-07, + "loss": 0.5416, + "step": 17436 + }, + { + "epoch": 0.719526285384171, + "grad_norm": 2.1287520518760457, + "learning_rate": 5.775156661648735e-07, + "loss": 0.498, + "step": 17437 + }, + { + "epoch": 0.7195675497235289, + "grad_norm": 4.075968902478659, + "learning_rate": 5.773575920066126e-07, + 
"loss": 0.496, + "step": 17438 + }, + { + "epoch": 0.7196088140628869, + "grad_norm": 3.192591072269687, + "learning_rate": 5.771995343290537e-07, + "loss": 0.4833, + "step": 17439 + }, + { + "epoch": 0.7196500784022448, + "grad_norm": 2.842167142521156, + "learning_rate": 5.770414931350192e-07, + "loss": 0.5239, + "step": 17440 + }, + { + "epoch": 0.7196913427416027, + "grad_norm": 3.6882872939760483, + "learning_rate": 5.768834684273325e-07, + "loss": 0.5651, + "step": 17441 + }, + { + "epoch": 0.7197326070809607, + "grad_norm": 2.287880152033153, + "learning_rate": 5.767254602088161e-07, + "loss": 0.4949, + "step": 17442 + }, + { + "epoch": 0.7197738714203186, + "grad_norm": 2.9848313730337686, + "learning_rate": 5.765674684822934e-07, + "loss": 0.5123, + "step": 17443 + }, + { + "epoch": 0.7198151357596765, + "grad_norm": 3.9276731265000677, + "learning_rate": 5.764094932505858e-07, + "loss": 0.5325, + "step": 17444 + }, + { + "epoch": 0.7198564000990344, + "grad_norm": 4.866651719435677, + "learning_rate": 5.762515345165145e-07, + "loss": 0.5024, + "step": 17445 + }, + { + "epoch": 0.7198976644383923, + "grad_norm": 5.486775223055876, + "learning_rate": 5.760935922829017e-07, + "loss": 0.5304, + "step": 17446 + }, + { + "epoch": 0.7199389287777502, + "grad_norm": 3.410395983392885, + "learning_rate": 5.759356665525688e-07, + "loss": 0.4861, + "step": 17447 + }, + { + "epoch": 0.7199801931171081, + "grad_norm": 6.47626528245926, + "learning_rate": 5.757777573283367e-07, + "loss": 0.582, + "step": 17448 + }, + { + "epoch": 0.7200214574564662, + "grad_norm": 2.918309242264193, + "learning_rate": 5.756198646130256e-07, + "loss": 0.4829, + "step": 17449 + }, + { + "epoch": 0.7200627217958241, + "grad_norm": 4.221333235147738, + "learning_rate": 5.754619884094563e-07, + "loss": 0.5035, + "step": 17450 + }, + { + "epoch": 0.720103986135182, + "grad_norm": 2.1141291438251257, + "learning_rate": 5.753041287204489e-07, + "loss": 0.4864, + "step": 17451 + }, + { + "epoch": 0.7201452504745399, + "grad_norm": 21.30708053429719, + "learning_rate": 5.751462855488227e-07, + "loss": 0.4255, + "step": 17452 + }, + { + "epoch": 0.7201865148138978, + "grad_norm": 2.510721877056112, + "learning_rate": 5.749884588973974e-07, + "loss": 0.5071, + "step": 17453 + }, + { + "epoch": 0.7202277791532558, + "grad_norm": 2.8384431215327064, + "learning_rate": 5.748306487689922e-07, + "loss": 0.4578, + "step": 17454 + }, + { + "epoch": 0.7202690434926137, + "grad_norm": 4.637257460118176, + "learning_rate": 5.746728551664268e-07, + "loss": 0.5433, + "step": 17455 + }, + { + "epoch": 0.7203103078319716, + "grad_norm": 4.061558436904876, + "learning_rate": 5.745150780925183e-07, + "loss": 0.5844, + "step": 17456 + }, + { + "epoch": 0.7203515721713295, + "grad_norm": 3.018076551170617, + "learning_rate": 5.743573175500862e-07, + "loss": 0.5154, + "step": 17457 + }, + { + "epoch": 0.7203928365106874, + "grad_norm": 3.5744408026923913, + "learning_rate": 5.741995735419475e-07, + "loss": 0.562, + "step": 17458 + }, + { + "epoch": 0.7204341008500454, + "grad_norm": 3.2286577447407794, + "learning_rate": 5.740418460709205e-07, + "loss": 0.531, + "step": 17459 + }, + { + "epoch": 0.7204753651894034, + "grad_norm": 4.7188231678614105, + "learning_rate": 5.73884135139823e-07, + "loss": 0.497, + "step": 17460 + }, + { + "epoch": 0.7205166295287613, + "grad_norm": 3.691303891745635, + "learning_rate": 5.737264407514711e-07, + "loss": 0.5183, + "step": 17461 + }, + { + "epoch": 0.7205578938681192, + "grad_norm": 
2.323677430063486, + "learning_rate": 5.735687629086822e-07, + "loss": 0.5547, + "step": 17462 + }, + { + "epoch": 0.7205991582074771, + "grad_norm": 2.5329823769427886, + "learning_rate": 5.734111016142727e-07, + "loss": 0.5031, + "step": 17463 + }, + { + "epoch": 0.720640422546835, + "grad_norm": 2.9981880301642354, + "learning_rate": 5.732534568710594e-07, + "loss": 0.5493, + "step": 17464 + }, + { + "epoch": 0.7206816868861929, + "grad_norm": 2.166767413161232, + "learning_rate": 5.730958286818573e-07, + "loss": 0.4857, + "step": 17465 + }, + { + "epoch": 0.7207229512255509, + "grad_norm": 2.98897483962985, + "learning_rate": 5.729382170494824e-07, + "loss": 0.4931, + "step": 17466 + }, + { + "epoch": 0.7207642155649088, + "grad_norm": 3.3147189399520247, + "learning_rate": 5.727806219767502e-07, + "loss": 0.4905, + "step": 17467 + }, + { + "epoch": 0.7208054799042667, + "grad_norm": 4.590195980957886, + "learning_rate": 5.726230434664761e-07, + "loss": 0.5733, + "step": 17468 + }, + { + "epoch": 0.7208467442436246, + "grad_norm": 2.558849412115443, + "learning_rate": 5.724654815214737e-07, + "loss": 0.5224, + "step": 17469 + }, + { + "epoch": 0.7208880085829826, + "grad_norm": 2.7236743551459233, + "learning_rate": 5.723079361445582e-07, + "loss": 0.5471, + "step": 17470 + }, + { + "epoch": 0.7209292729223405, + "grad_norm": 2.6965378897308, + "learning_rate": 5.721504073385443e-07, + "loss": 0.5353, + "step": 17471 + }, + { + "epoch": 0.7209705372616985, + "grad_norm": 5.669243072705838, + "learning_rate": 5.719928951062453e-07, + "loss": 0.5632, + "step": 17472 + }, + { + "epoch": 0.7210118016010564, + "grad_norm": 4.911638897225034, + "learning_rate": 5.718353994504742e-07, + "loss": 0.5567, + "step": 17473 + }, + { + "epoch": 0.7210530659404143, + "grad_norm": 57.939186972895584, + "learning_rate": 5.716779203740448e-07, + "loss": 0.4728, + "step": 17474 + }, + { + "epoch": 0.7210943302797722, + "grad_norm": 2.856489409775086, + "learning_rate": 5.715204578797698e-07, + "loss": 0.514, + "step": 17475 + }, + { + "epoch": 0.7211355946191301, + "grad_norm": 2.9752769556889556, + "learning_rate": 5.713630119704628e-07, + "loss": 0.5259, + "step": 17476 + }, + { + "epoch": 0.721176858958488, + "grad_norm": 6.327164650056986, + "learning_rate": 5.71205582648935e-07, + "loss": 0.57, + "step": 17477 + }, + { + "epoch": 0.721218123297846, + "grad_norm": 10.188409145075573, + "learning_rate": 5.71048169917999e-07, + "loss": 0.5484, + "step": 17478 + }, + { + "epoch": 0.7212593876372039, + "grad_norm": 2.879144696398363, + "learning_rate": 5.708907737804665e-07, + "loss": 0.5738, + "step": 17479 + }, + { + "epoch": 0.7213006519765619, + "grad_norm": 5.462263724506708, + "learning_rate": 5.707333942391496e-07, + "loss": 0.5044, + "step": 17480 + }, + { + "epoch": 0.7213419163159198, + "grad_norm": 2.6510105843314333, + "learning_rate": 5.705760312968583e-07, + "loss": 0.497, + "step": 17481 + }, + { + "epoch": 0.7213831806552777, + "grad_norm": 9.097545781635691, + "learning_rate": 5.704186849564045e-07, + "loss": 0.5747, + "step": 17482 + }, + { + "epoch": 0.7214244449946356, + "grad_norm": 6.636543199296151, + "learning_rate": 5.702613552205982e-07, + "loss": 0.5069, + "step": 17483 + }, + { + "epoch": 0.7214657093339936, + "grad_norm": 3.0032054368133045, + "learning_rate": 5.701040420922505e-07, + "loss": 0.5304, + "step": 17484 + }, + { + "epoch": 0.7215069736733515, + "grad_norm": 36.21972922776488, + "learning_rate": 5.699467455741707e-07, + "loss": 0.6015, + "step": 17485 + }, + { + 
"epoch": 0.7215482380127094, + "grad_norm": 9.477929159423956, + "learning_rate": 5.697894656691682e-07, + "loss": 0.5325, + "step": 17486 + }, + { + "epoch": 0.7215895023520673, + "grad_norm": 7.2195768555068645, + "learning_rate": 5.696322023800531e-07, + "loss": 0.5782, + "step": 17487 + }, + { + "epoch": 0.7216307666914252, + "grad_norm": 3.302432614770799, + "learning_rate": 5.694749557096346e-07, + "loss": 0.5054, + "step": 17488 + }, + { + "epoch": 0.7216720310307831, + "grad_norm": 3.2870922460861327, + "learning_rate": 5.693177256607207e-07, + "loss": 0.5249, + "step": 17489 + }, + { + "epoch": 0.7217132953701412, + "grad_norm": 3.2306847907389757, + "learning_rate": 5.691605122361205e-07, + "loss": 0.4675, + "step": 17490 + }, + { + "epoch": 0.7217545597094991, + "grad_norm": 2.6872106660836583, + "learning_rate": 5.690033154386421e-07, + "loss": 0.4584, + "step": 17491 + }, + { + "epoch": 0.721795824048857, + "grad_norm": 3.602256958209911, + "learning_rate": 5.688461352710941e-07, + "loss": 0.5314, + "step": 17492 + }, + { + "epoch": 0.7218370883882149, + "grad_norm": 2.4682970988368576, + "learning_rate": 5.686889717362829e-07, + "loss": 0.5591, + "step": 17493 + }, + { + "epoch": 0.7218783527275728, + "grad_norm": 2.0540602894341076, + "learning_rate": 5.685318248370163e-07, + "loss": 0.5031, + "step": 17494 + }, + { + "epoch": 0.7219196170669308, + "grad_norm": 5.229177349500565, + "learning_rate": 5.683746945761019e-07, + "loss": 0.467, + "step": 17495 + }, + { + "epoch": 0.7219608814062887, + "grad_norm": 2.417429484692833, + "learning_rate": 5.682175809563461e-07, + "loss": 0.5022, + "step": 17496 + }, + { + "epoch": 0.7220021457456466, + "grad_norm": 2.527355241760786, + "learning_rate": 5.680604839805556e-07, + "loss": 0.5105, + "step": 17497 + }, + { + "epoch": 0.7220434100850045, + "grad_norm": 2.0342486623059113, + "learning_rate": 5.679034036515356e-07, + "loss": 0.5348, + "step": 17498 + }, + { + "epoch": 0.7220846744243624, + "grad_norm": 4.459393620864165, + "learning_rate": 5.677463399720923e-07, + "loss": 0.5133, + "step": 17499 + }, + { + "epoch": 0.7221259387637204, + "grad_norm": 6.047811703797125, + "learning_rate": 5.675892929450318e-07, + "loss": 0.5615, + "step": 17500 + }, + { + "epoch": 0.7221672031030784, + "grad_norm": 2.4781943571738023, + "learning_rate": 5.674322625731594e-07, + "loss": 0.5373, + "step": 17501 + }, + { + "epoch": 0.7222084674424363, + "grad_norm": 3.3254590155469947, + "learning_rate": 5.672752488592795e-07, + "loss": 0.574, + "step": 17502 + }, + { + "epoch": 0.7222497317817942, + "grad_norm": 5.0684702899487935, + "learning_rate": 5.671182518061967e-07, + "loss": 0.577, + "step": 17503 + }, + { + "epoch": 0.7222909961211521, + "grad_norm": 1.9916949655501703, + "learning_rate": 5.669612714167158e-07, + "loss": 0.491, + "step": 17504 + }, + { + "epoch": 0.72233226046051, + "grad_norm": 4.3252209356864055, + "learning_rate": 5.668043076936412e-07, + "loss": 0.5156, + "step": 17505 + }, + { + "epoch": 0.7223735247998679, + "grad_norm": 3.4506153879009647, + "learning_rate": 5.666473606397755e-07, + "loss": 0.5477, + "step": 17506 + }, + { + "epoch": 0.7224147891392259, + "grad_norm": 3.0713281569900586, + "learning_rate": 5.664904302579229e-07, + "loss": 0.5192, + "step": 17507 + }, + { + "epoch": 0.7224560534785838, + "grad_norm": 10.948022458448126, + "learning_rate": 5.663335165508872e-07, + "loss": 0.5159, + "step": 17508 + }, + { + "epoch": 0.7224973178179417, + "grad_norm": 4.310846527436338, + "learning_rate": 
5.661766195214697e-07, + "loss": 0.5202, + "step": 17509 + }, + { + "epoch": 0.7225385821572997, + "grad_norm": 2.3765899111821702, + "learning_rate": 5.660197391724745e-07, + "loss": 0.521, + "step": 17510 + }, + { + "epoch": 0.7225798464966576, + "grad_norm": 4.389917217833268, + "learning_rate": 5.658628755067028e-07, + "loss": 0.4862, + "step": 17511 + }, + { + "epoch": 0.7226211108360155, + "grad_norm": 2.787323473365897, + "learning_rate": 5.65706028526957e-07, + "loss": 0.5538, + "step": 17512 + }, + { + "epoch": 0.7226623751753735, + "grad_norm": 2.543962932496638, + "learning_rate": 5.655491982360392e-07, + "loss": 0.5222, + "step": 17513 + }, + { + "epoch": 0.7227036395147314, + "grad_norm": 2.378700983861485, + "learning_rate": 5.653923846367498e-07, + "loss": 0.5243, + "step": 17514 + }, + { + "epoch": 0.7227449038540893, + "grad_norm": 11.749129239055337, + "learning_rate": 5.652355877318906e-07, + "loss": 0.5079, + "step": 17515 + }, + { + "epoch": 0.7227861681934472, + "grad_norm": 8.683640188684866, + "learning_rate": 5.650788075242621e-07, + "loss": 0.5341, + "step": 17516 + }, + { + "epoch": 0.7228274325328051, + "grad_norm": 2.5528120746269587, + "learning_rate": 5.649220440166655e-07, + "loss": 0.4957, + "step": 17517 + }, + { + "epoch": 0.722868696872163, + "grad_norm": 17.755654596570846, + "learning_rate": 5.647652972118998e-07, + "loss": 0.5235, + "step": 17518 + }, + { + "epoch": 0.722909961211521, + "grad_norm": 8.351575269267471, + "learning_rate": 5.646085671127655e-07, + "loss": 0.483, + "step": 17519 + }, + { + "epoch": 0.722951225550879, + "grad_norm": 4.740866967542746, + "learning_rate": 5.644518537220623e-07, + "loss": 0.5045, + "step": 17520 + }, + { + "epoch": 0.7229924898902369, + "grad_norm": 2.8109037119666103, + "learning_rate": 5.642951570425899e-07, + "loss": 0.5107, + "step": 17521 + }, + { + "epoch": 0.7230337542295948, + "grad_norm": 4.251336605281452, + "learning_rate": 5.641384770771461e-07, + "loss": 0.5863, + "step": 17522 + }, + { + "epoch": 0.7230750185689527, + "grad_norm": 3.3294794524937212, + "learning_rate": 5.639818138285308e-07, + "loss": 0.4961, + "step": 17523 + }, + { + "epoch": 0.7231162829083106, + "grad_norm": 12.510155720509637, + "learning_rate": 5.638251672995414e-07, + "loss": 0.5207, + "step": 17524 + }, + { + "epoch": 0.7231575472476686, + "grad_norm": 3.1257120841157873, + "learning_rate": 5.636685374929766e-07, + "loss": 0.5027, + "step": 17525 + }, + { + "epoch": 0.7231988115870265, + "grad_norm": 2.5333923370216387, + "learning_rate": 5.635119244116343e-07, + "loss": 0.5517, + "step": 17526 + }, + { + "epoch": 0.7232400759263844, + "grad_norm": 5.1119777768341175, + "learning_rate": 5.633553280583115e-07, + "loss": 0.5061, + "step": 17527 + }, + { + "epoch": 0.7232813402657423, + "grad_norm": 3.4051577344516257, + "learning_rate": 5.631987484358055e-07, + "loss": 0.5508, + "step": 17528 + }, + { + "epoch": 0.7233226046051002, + "grad_norm": 31.535397719005363, + "learning_rate": 5.63042185546914e-07, + "loss": 0.483, + "step": 17529 + }, + { + "epoch": 0.7233638689444581, + "grad_norm": 6.583284178975784, + "learning_rate": 5.628856393944323e-07, + "loss": 0.5464, + "step": 17530 + }, + { + "epoch": 0.7234051332838162, + "grad_norm": 2.007666771085082, + "learning_rate": 5.627291099811576e-07, + "loss": 0.5254, + "step": 17531 + }, + { + "epoch": 0.7234463976231741, + "grad_norm": 2.6355767405418637, + "learning_rate": 5.625725973098855e-07, + "loss": 0.5264, + "step": 17532 + }, + { + "epoch": 0.723487661962532, + 
"grad_norm": 5.5016150601417735, + "learning_rate": 5.624161013834124e-07, + "loss": 0.543, + "step": 17533 + }, + { + "epoch": 0.7235289263018899, + "grad_norm": 5.11266291420263, + "learning_rate": 5.622596222045329e-07, + "loss": 0.5214, + "step": 17534 + }, + { + "epoch": 0.7235701906412478, + "grad_norm": 2.593409802920945, + "learning_rate": 5.621031597760422e-07, + "loss": 0.5151, + "step": 17535 + }, + { + "epoch": 0.7236114549806058, + "grad_norm": 2.9831155484079175, + "learning_rate": 5.619467141007358e-07, + "loss": 0.4754, + "step": 17536 + }, + { + "epoch": 0.7236527193199637, + "grad_norm": 3.561315195383653, + "learning_rate": 5.617902851814075e-07, + "loss": 0.4833, + "step": 17537 + }, + { + "epoch": 0.7236939836593216, + "grad_norm": 8.486691995008396, + "learning_rate": 5.616338730208522e-07, + "loss": 0.4757, + "step": 17538 + }, + { + "epoch": 0.7237352479986795, + "grad_norm": 2.1097388282501046, + "learning_rate": 5.614774776218627e-07, + "loss": 0.501, + "step": 17539 + }, + { + "epoch": 0.7237765123380374, + "grad_norm": 3.6453977016422754, + "learning_rate": 5.613210989872333e-07, + "loss": 0.5395, + "step": 17540 + }, + { + "epoch": 0.7238177766773954, + "grad_norm": 2.293368438586975, + "learning_rate": 5.611647371197573e-07, + "loss": 0.481, + "step": 17541 + }, + { + "epoch": 0.7238590410167534, + "grad_norm": 3.3200944104623287, + "learning_rate": 5.610083920222283e-07, + "loss": 0.5358, + "step": 17542 + }, + { + "epoch": 0.7239003053561113, + "grad_norm": 2.6597063851010687, + "learning_rate": 5.608520636974378e-07, + "loss": 0.5212, + "step": 17543 + }, + { + "epoch": 0.7239415696954692, + "grad_norm": 3.904730796012352, + "learning_rate": 5.606957521481789e-07, + "loss": 0.5099, + "step": 17544 + }, + { + "epoch": 0.7239828340348271, + "grad_norm": 1.982580466796979, + "learning_rate": 5.605394573772437e-07, + "loss": 0.5263, + "step": 17545 + }, + { + "epoch": 0.724024098374185, + "grad_norm": 4.155214718668662, + "learning_rate": 5.603831793874243e-07, + "loss": 0.5049, + "step": 17546 + }, + { + "epoch": 0.7240653627135429, + "grad_norm": 2.766165733291196, + "learning_rate": 5.602269181815114e-07, + "loss": 0.4996, + "step": 17547 + }, + { + "epoch": 0.7241066270529009, + "grad_norm": 2.65023810754655, + "learning_rate": 5.600706737622967e-07, + "loss": 0.5527, + "step": 17548 + }, + { + "epoch": 0.7241478913922588, + "grad_norm": 2.345825790823966, + "learning_rate": 5.599144461325718e-07, + "loss": 0.4829, + "step": 17549 + }, + { + "epoch": 0.7241891557316167, + "grad_norm": 4.135242385074969, + "learning_rate": 5.597582352951265e-07, + "loss": 0.5057, + "step": 17550 + }, + { + "epoch": 0.7242304200709747, + "grad_norm": 3.0990588331906133, + "learning_rate": 5.596020412527507e-07, + "loss": 0.5027, + "step": 17551 + }, + { + "epoch": 0.7242716844103326, + "grad_norm": 3.9919924327510574, + "learning_rate": 5.59445864008235e-07, + "loss": 0.5823, + "step": 17552 + }, + { + "epoch": 0.7243129487496905, + "grad_norm": 2.950025526865539, + "learning_rate": 5.59289703564369e-07, + "loss": 0.5266, + "step": 17553 + }, + { + "epoch": 0.7243542130890485, + "grad_norm": 5.271996805768255, + "learning_rate": 5.591335599239427e-07, + "loss": 0.5329, + "step": 17554 + }, + { + "epoch": 0.7243954774284064, + "grad_norm": 3.1081080489957795, + "learning_rate": 5.589774330897443e-07, + "loss": 0.4916, + "step": 17555 + }, + { + "epoch": 0.7244367417677643, + "grad_norm": 2.5179896349856246, + "learning_rate": 5.58821323064563e-07, + "loss": 0.529, + "step": 
17556 + }, + { + "epoch": 0.7244780061071222, + "grad_norm": 2.760590253287695, + "learning_rate": 5.586652298511872e-07, + "loss": 0.4987, + "step": 17557 + }, + { + "epoch": 0.7245192704464801, + "grad_norm": 2.415053347743267, + "learning_rate": 5.58509153452406e-07, + "loss": 0.5569, + "step": 17558 + }, + { + "epoch": 0.724560534785838, + "grad_norm": 2.4132098423644845, + "learning_rate": 5.583530938710059e-07, + "loss": 0.4698, + "step": 17559 + }, + { + "epoch": 0.724601799125196, + "grad_norm": 2.926691330150066, + "learning_rate": 5.581970511097753e-07, + "loss": 0.5433, + "step": 17560 + }, + { + "epoch": 0.724643063464554, + "grad_norm": 2.6295308536379256, + "learning_rate": 5.580410251715012e-07, + "loss": 0.5166, + "step": 17561 + }, + { + "epoch": 0.7246843278039119, + "grad_norm": 6.407146851360437, + "learning_rate": 5.578850160589715e-07, + "loss": 0.4878, + "step": 17562 + }, + { + "epoch": 0.7247255921432698, + "grad_norm": 4.398157051166785, + "learning_rate": 5.577290237749723e-07, + "loss": 0.5104, + "step": 17563 + }, + { + "epoch": 0.7247668564826277, + "grad_norm": 2.678432177651738, + "learning_rate": 5.575730483222891e-07, + "loss": 0.5211, + "step": 17564 + }, + { + "epoch": 0.7248081208219856, + "grad_norm": 3.9666733164250756, + "learning_rate": 5.57417089703709e-07, + "loss": 0.4871, + "step": 17565 + }, + { + "epoch": 0.7248493851613436, + "grad_norm": 7.5055547502928075, + "learning_rate": 5.572611479220183e-07, + "loss": 0.4999, + "step": 17566 + }, + { + "epoch": 0.7248906495007015, + "grad_norm": 3.2889291717973856, + "learning_rate": 5.571052229800013e-07, + "loss": 0.4895, + "step": 17567 + }, + { + "epoch": 0.7249319138400594, + "grad_norm": 3.6583814928097618, + "learning_rate": 5.569493148804437e-07, + "loss": 0.5516, + "step": 17568 + }, + { + "epoch": 0.7249731781794173, + "grad_norm": 4.640362028859894, + "learning_rate": 5.567934236261305e-07, + "loss": 0.5314, + "step": 17569 + }, + { + "epoch": 0.7250144425187752, + "grad_norm": 3.009440310360183, + "learning_rate": 5.566375492198469e-07, + "loss": 0.4923, + "step": 17570 + }, + { + "epoch": 0.7250557068581333, + "grad_norm": 11.387693250729727, + "learning_rate": 5.564816916643761e-07, + "loss": 0.5724, + "step": 17571 + }, + { + "epoch": 0.7250969711974912, + "grad_norm": 2.5703091843302497, + "learning_rate": 5.563258509625025e-07, + "loss": 0.4951, + "step": 17572 + }, + { + "epoch": 0.7251382355368491, + "grad_norm": 4.8415936679873814, + "learning_rate": 5.5617002711701e-07, + "loss": 0.5239, + "step": 17573 + }, + { + "epoch": 0.725179499876207, + "grad_norm": 4.1510506588898695, + "learning_rate": 5.560142201306823e-07, + "loss": 0.4849, + "step": 17574 + }, + { + "epoch": 0.7252207642155649, + "grad_norm": 2.293343032473044, + "learning_rate": 5.558584300063017e-07, + "loss": 0.4502, + "step": 17575 + }, + { + "epoch": 0.7252620285549228, + "grad_norm": 3.708412914846953, + "learning_rate": 5.55702656746652e-07, + "loss": 0.5457, + "step": 17576 + }, + { + "epoch": 0.7253032928942807, + "grad_norm": 2.789912786521275, + "learning_rate": 5.555469003545145e-07, + "loss": 0.5443, + "step": 17577 + }, + { + "epoch": 0.7253445572336387, + "grad_norm": 3.581424393589691, + "learning_rate": 5.553911608326721e-07, + "loss": 0.4595, + "step": 17578 + }, + { + "epoch": 0.7253858215729966, + "grad_norm": 4.233379104683408, + "learning_rate": 5.552354381839071e-07, + "loss": 0.5313, + "step": 17579 + }, + { + "epoch": 0.7254270859123545, + "grad_norm": 11.490668437633632, + "learning_rate": 
5.550797324109999e-07, + "loss": 0.4901, + "step": 17580 + }, + { + "epoch": 0.7254683502517125, + "grad_norm": 20.476324637221786, + "learning_rate": 5.549240435167327e-07, + "loss": 0.5591, + "step": 17581 + }, + { + "epoch": 0.7255096145910704, + "grad_norm": 4.142680646512014, + "learning_rate": 5.547683715038864e-07, + "loss": 0.4786, + "step": 17582 + }, + { + "epoch": 0.7255508789304284, + "grad_norm": 3.363598782793297, + "learning_rate": 5.546127163752419e-07, + "loss": 0.5586, + "step": 17583 + }, + { + "epoch": 0.7255921432697863, + "grad_norm": 4.355947819486144, + "learning_rate": 5.544570781335788e-07, + "loss": 0.4669, + "step": 17584 + }, + { + "epoch": 0.7256334076091442, + "grad_norm": 4.350595505849674, + "learning_rate": 5.543014567816779e-07, + "loss": 0.4245, + "step": 17585 + }, + { + "epoch": 0.7256746719485021, + "grad_norm": 2.3472576942561907, + "learning_rate": 5.541458523223189e-07, + "loss": 0.4769, + "step": 17586 + }, + { + "epoch": 0.72571593628786, + "grad_norm": 3.8634169140489796, + "learning_rate": 5.53990264758281e-07, + "loss": 0.5187, + "step": 17587 + }, + { + "epoch": 0.7257572006272179, + "grad_norm": 3.741063905209217, + "learning_rate": 5.538346940923433e-07, + "loss": 0.5102, + "step": 17588 + }, + { + "epoch": 0.7257984649665759, + "grad_norm": 3.2113951526335804, + "learning_rate": 5.536791403272855e-07, + "loss": 0.535, + "step": 17589 + }, + { + "epoch": 0.7258397293059338, + "grad_norm": 2.7291033944350853, + "learning_rate": 5.53523603465885e-07, + "loss": 0.5263, + "step": 17590 + }, + { + "epoch": 0.7258809936452918, + "grad_norm": 3.2609022500075775, + "learning_rate": 5.533680835109212e-07, + "loss": 0.5084, + "step": 17591 + }, + { + "epoch": 0.7259222579846497, + "grad_norm": 3.9863641337581375, + "learning_rate": 5.532125804651712e-07, + "loss": 0.4719, + "step": 17592 + }, + { + "epoch": 0.7259635223240076, + "grad_norm": 2.1671981986294604, + "learning_rate": 5.530570943314127e-07, + "loss": 0.4664, + "step": 17593 + }, + { + "epoch": 0.7260047866633655, + "grad_norm": 3.716487251979657, + "learning_rate": 5.529016251124237e-07, + "loss": 0.4928, + "step": 17594 + }, + { + "epoch": 0.7260460510027235, + "grad_norm": 2.342319189466503, + "learning_rate": 5.527461728109812e-07, + "loss": 0.4848, + "step": 17595 + }, + { + "epoch": 0.7260873153420814, + "grad_norm": 2.7986647714121142, + "learning_rate": 5.525907374298614e-07, + "loss": 0.5271, + "step": 17596 + }, + { + "epoch": 0.7261285796814393, + "grad_norm": 6.153358551839614, + "learning_rate": 5.524353189718409e-07, + "loss": 0.4429, + "step": 17597 + }, + { + "epoch": 0.7261698440207972, + "grad_norm": 5.329758844247974, + "learning_rate": 5.522799174396963e-07, + "loss": 0.4966, + "step": 17598 + }, + { + "epoch": 0.7262111083601551, + "grad_norm": 2.947124068555125, + "learning_rate": 5.521245328362036e-07, + "loss": 0.5091, + "step": 17599 + }, + { + "epoch": 0.726252372699513, + "grad_norm": 2.971064086862619, + "learning_rate": 5.519691651641373e-07, + "loss": 0.5345, + "step": 17600 + }, + { + "epoch": 0.726293637038871, + "grad_norm": 5.9631154802501865, + "learning_rate": 5.518138144262734e-07, + "loss": 0.5209, + "step": 17601 + }, + { + "epoch": 0.726334901378229, + "grad_norm": 2.936142510152628, + "learning_rate": 5.516584806253873e-07, + "loss": 0.5421, + "step": 17602 + }, + { + "epoch": 0.7263761657175869, + "grad_norm": 2.757639440498815, + "learning_rate": 5.515031637642528e-07, + "loss": 0.5286, + "step": 17603 + }, + { + "epoch": 0.7264174300569448, + 
"grad_norm": 2.9764506552393626, + "learning_rate": 5.513478638456449e-07, + "loss": 0.5456, + "step": 17604 + }, + { + "epoch": 0.7264586943963027, + "grad_norm": 3.4004199821506442, + "learning_rate": 5.511925808723367e-07, + "loss": 0.5613, + "step": 17605 + }, + { + "epoch": 0.7264999587356606, + "grad_norm": 3.8279920613610607, + "learning_rate": 5.510373148471027e-07, + "loss": 0.5023, + "step": 17606 + }, + { + "epoch": 0.7265412230750186, + "grad_norm": 2.8338792272660585, + "learning_rate": 5.508820657727168e-07, + "loss": 0.5616, + "step": 17607 + }, + { + "epoch": 0.7265824874143765, + "grad_norm": 2.587360064293092, + "learning_rate": 5.507268336519509e-07, + "loss": 0.4887, + "step": 17608 + }, + { + "epoch": 0.7266237517537344, + "grad_norm": 2.44079482291524, + "learning_rate": 5.505716184875785e-07, + "loss": 0.512, + "step": 17609 + }, + { + "epoch": 0.7266650160930923, + "grad_norm": 2.45565200660676, + "learning_rate": 5.50416420282372e-07, + "loss": 0.4606, + "step": 17610 + }, + { + "epoch": 0.7267062804324502, + "grad_norm": 6.256105516840843, + "learning_rate": 5.502612390391044e-07, + "loss": 0.4439, + "step": 17611 + }, + { + "epoch": 0.7267475447718083, + "grad_norm": 3.4242498214373196, + "learning_rate": 5.501060747605463e-07, + "loss": 0.5202, + "step": 17612 + }, + { + "epoch": 0.7267888091111662, + "grad_norm": 2.9533530520121776, + "learning_rate": 5.499509274494701e-07, + "loss": 0.4936, + "step": 17613 + }, + { + "epoch": 0.7268300734505241, + "grad_norm": 2.872742761720682, + "learning_rate": 5.497957971086469e-07, + "loss": 0.5261, + "step": 17614 + }, + { + "epoch": 0.726871337789882, + "grad_norm": 10.39612135640058, + "learning_rate": 5.496406837408483e-07, + "loss": 0.4971, + "step": 17615 + }, + { + "epoch": 0.7269126021292399, + "grad_norm": 13.633028896947685, + "learning_rate": 5.494855873488446e-07, + "loss": 0.526, + "step": 17616 + }, + { + "epoch": 0.7269538664685978, + "grad_norm": 2.304627924921566, + "learning_rate": 5.493305079354057e-07, + "loss": 0.4678, + "step": 17617 + }, + { + "epoch": 0.7269951308079557, + "grad_norm": 6.274954017775061, + "learning_rate": 5.49175445503302e-07, + "loss": 0.5109, + "step": 17618 + }, + { + "epoch": 0.7270363951473137, + "grad_norm": 2.4836884899903615, + "learning_rate": 5.490204000553035e-07, + "loss": 0.5331, + "step": 17619 + }, + { + "epoch": 0.7270776594866716, + "grad_norm": 9.04499910255142, + "learning_rate": 5.488653715941801e-07, + "loss": 0.557, + "step": 17620 + }, + { + "epoch": 0.7271189238260295, + "grad_norm": 10.132937231393102, + "learning_rate": 5.487103601227001e-07, + "loss": 0.5015, + "step": 17621 + }, + { + "epoch": 0.7271601881653875, + "grad_norm": 2.695716938816139, + "learning_rate": 5.485553656436328e-07, + "loss": 0.5681, + "step": 17622 + }, + { + "epoch": 0.7272014525047454, + "grad_norm": 3.1693842894203663, + "learning_rate": 5.484003881597468e-07, + "loss": 0.6019, + "step": 17623 + }, + { + "epoch": 0.7272427168441034, + "grad_norm": 4.3476627184977605, + "learning_rate": 5.482454276738109e-07, + "loss": 0.5209, + "step": 17624 + }, + { + "epoch": 0.7272839811834613, + "grad_norm": 3.1052550675172697, + "learning_rate": 5.480904841885921e-07, + "loss": 0.5048, + "step": 17625 + }, + { + "epoch": 0.7273252455228192, + "grad_norm": 2.1983927428907153, + "learning_rate": 5.479355577068585e-07, + "loss": 0.5599, + "step": 17626 + }, + { + "epoch": 0.7273665098621771, + "grad_norm": 3.059028189819106, + "learning_rate": 5.477806482313782e-07, + "loss": 0.5291, + 
"step": 17627 + }, + { + "epoch": 0.727407774201535, + "grad_norm": 4.0745111288310865, + "learning_rate": 5.476257557649177e-07, + "loss": 0.5086, + "step": 17628 + }, + { + "epoch": 0.7274490385408929, + "grad_norm": 3.088211617022381, + "learning_rate": 5.474708803102428e-07, + "loss": 0.5105, + "step": 17629 + }, + { + "epoch": 0.7274903028802508, + "grad_norm": 3.0094074146781873, + "learning_rate": 5.473160218701212e-07, + "loss": 0.5072, + "step": 17630 + }, + { + "epoch": 0.7275315672196088, + "grad_norm": 5.380880600130863, + "learning_rate": 5.471611804473184e-07, + "loss": 0.4918, + "step": 17631 + }, + { + "epoch": 0.7275728315589668, + "grad_norm": 3.234201267508089, + "learning_rate": 5.470063560446011e-07, + "loss": 0.5182, + "step": 17632 + }, + { + "epoch": 0.7276140958983247, + "grad_norm": 2.2887974669633833, + "learning_rate": 5.46851548664734e-07, + "loss": 0.481, + "step": 17633 + }, + { + "epoch": 0.7276553602376826, + "grad_norm": 2.19532003268313, + "learning_rate": 5.466967583104826e-07, + "loss": 0.4941, + "step": 17634 + }, + { + "epoch": 0.7276966245770405, + "grad_norm": 3.409069259200255, + "learning_rate": 5.465419849846119e-07, + "loss": 0.5039, + "step": 17635 + }, + { + "epoch": 0.7277378889163985, + "grad_norm": 3.6965100142052982, + "learning_rate": 5.46387228689887e-07, + "loss": 0.5085, + "step": 17636 + }, + { + "epoch": 0.7277791532557564, + "grad_norm": 8.32627093900138, + "learning_rate": 5.462324894290712e-07, + "loss": 0.5166, + "step": 17637 + }, + { + "epoch": 0.7278204175951143, + "grad_norm": 4.027978584760997, + "learning_rate": 5.460777672049292e-07, + "loss": 0.4755, + "step": 17638 + }, + { + "epoch": 0.7278616819344722, + "grad_norm": 5.434189692008091, + "learning_rate": 5.459230620202246e-07, + "loss": 0.5485, + "step": 17639 + }, + { + "epoch": 0.7279029462738301, + "grad_norm": 3.5650977212711306, + "learning_rate": 5.457683738777213e-07, + "loss": 0.5577, + "step": 17640 + }, + { + "epoch": 0.727944210613188, + "grad_norm": 5.284282529490763, + "learning_rate": 5.456137027801815e-07, + "loss": 0.5037, + "step": 17641 + }, + { + "epoch": 0.7279854749525461, + "grad_norm": 2.7491277894800583, + "learning_rate": 5.454590487303689e-07, + "loss": 0.4826, + "step": 17642 + }, + { + "epoch": 0.728026739291904, + "grad_norm": 3.3494900019073364, + "learning_rate": 5.453044117310452e-07, + "loss": 0.507, + "step": 17643 + }, + { + "epoch": 0.7280680036312619, + "grad_norm": 2.1195581092053413, + "learning_rate": 5.451497917849733e-07, + "loss": 0.5296, + "step": 17644 + }, + { + "epoch": 0.7281092679706198, + "grad_norm": 3.345301902520336, + "learning_rate": 5.449951888949146e-07, + "loss": 0.5155, + "step": 17645 + }, + { + "epoch": 0.7281505323099777, + "grad_norm": 2.698091854783055, + "learning_rate": 5.448406030636306e-07, + "loss": 0.496, + "step": 17646 + }, + { + "epoch": 0.7281917966493356, + "grad_norm": 2.6400168240977226, + "learning_rate": 5.446860342938831e-07, + "loss": 0.4883, + "step": 17647 + }, + { + "epoch": 0.7282330609886936, + "grad_norm": 2.8700360029416827, + "learning_rate": 5.445314825884333e-07, + "loss": 0.4622, + "step": 17648 + }, + { + "epoch": 0.7282743253280515, + "grad_norm": 4.687586272328694, + "learning_rate": 5.443769479500408e-07, + "loss": 0.4863, + "step": 17649 + }, + { + "epoch": 0.7283155896674094, + "grad_norm": 3.467707559531137, + "learning_rate": 5.442224303814668e-07, + "loss": 0.5048, + "step": 17650 + }, + { + "epoch": 0.7283568540067673, + "grad_norm": 4.346855681817795, + 
"learning_rate": 5.440679298854709e-07, + "loss": 0.5289, + "step": 17651 + }, + { + "epoch": 0.7283981183461253, + "grad_norm": 3.4006498533507084, + "learning_rate": 5.439134464648138e-07, + "loss": 0.4762, + "step": 17652 + }, + { + "epoch": 0.7284393826854832, + "grad_norm": 4.895028300996956, + "learning_rate": 5.437589801222539e-07, + "loss": 0.5284, + "step": 17653 + }, + { + "epoch": 0.7284806470248412, + "grad_norm": 2.798723470448597, + "learning_rate": 5.436045308605506e-07, + "loss": 0.5209, + "step": 17654 + }, + { + "epoch": 0.7285219113641991, + "grad_norm": 4.51527356934008, + "learning_rate": 5.434500986824637e-07, + "loss": 0.5459, + "step": 17655 + }, + { + "epoch": 0.728563175703557, + "grad_norm": 2.0426776257683628, + "learning_rate": 5.432956835907501e-07, + "loss": 0.4751, + "step": 17656 + }, + { + "epoch": 0.7286044400429149, + "grad_norm": 2.924015717121956, + "learning_rate": 5.431412855881697e-07, + "loss": 0.494, + "step": 17657 + }, + { + "epoch": 0.7286457043822728, + "grad_norm": 3.2488951010943725, + "learning_rate": 5.429869046774789e-07, + "loss": 0.4797, + "step": 17658 + }, + { + "epoch": 0.7286869687216307, + "grad_norm": 3.620324262326382, + "learning_rate": 5.428325408614361e-07, + "loss": 0.5313, + "step": 17659 + }, + { + "epoch": 0.7287282330609887, + "grad_norm": 3.3120298362649954, + "learning_rate": 5.426781941427987e-07, + "loss": 0.5029, + "step": 17660 + }, + { + "epoch": 0.7287694974003466, + "grad_norm": 2.1097088008872906, + "learning_rate": 5.42523864524324e-07, + "loss": 0.5176, + "step": 17661 + }, + { + "epoch": 0.7288107617397045, + "grad_norm": 2.8538416601507954, + "learning_rate": 5.423695520087679e-07, + "loss": 0.5267, + "step": 17662 + }, + { + "epoch": 0.7288520260790625, + "grad_norm": 2.6680514674982225, + "learning_rate": 5.422152565988873e-07, + "loss": 0.5006, + "step": 17663 + }, + { + "epoch": 0.7288932904184204, + "grad_norm": 3.0978595767095074, + "learning_rate": 5.420609782974386e-07, + "loss": 0.4981, + "step": 17664 + }, + { + "epoch": 0.7289345547577784, + "grad_norm": 2.646290030073838, + "learning_rate": 5.419067171071767e-07, + "loss": 0.5487, + "step": 17665 + }, + { + "epoch": 0.7289758190971363, + "grad_norm": 3.1678823499810753, + "learning_rate": 5.417524730308577e-07, + "loss": 0.4813, + "step": 17666 + }, + { + "epoch": 0.7290170834364942, + "grad_norm": 5.497295716255537, + "learning_rate": 5.415982460712368e-07, + "loss": 0.524, + "step": 17667 + }, + { + "epoch": 0.7290583477758521, + "grad_norm": 6.552062901572819, + "learning_rate": 5.41444036231069e-07, + "loss": 0.4483, + "step": 17668 + }, + { + "epoch": 0.72909961211521, + "grad_norm": 9.211629809531626, + "learning_rate": 5.412898435131089e-07, + "loss": 0.5304, + "step": 17669 + }, + { + "epoch": 0.7291408764545679, + "grad_norm": 7.814508104024305, + "learning_rate": 5.411356679201099e-07, + "loss": 0.496, + "step": 17670 + }, + { + "epoch": 0.7291821407939258, + "grad_norm": 6.152643603325938, + "learning_rate": 5.409815094548266e-07, + "loss": 0.5356, + "step": 17671 + }, + { + "epoch": 0.7292234051332838, + "grad_norm": 2.8794233432860104, + "learning_rate": 5.408273681200126e-07, + "loss": 0.503, + "step": 17672 + }, + { + "epoch": 0.7292646694726418, + "grad_norm": 2.8715978707012857, + "learning_rate": 5.406732439184217e-07, + "loss": 0.5393, + "step": 17673 + }, + { + "epoch": 0.7293059338119997, + "grad_norm": 2.918281630228629, + "learning_rate": 5.405191368528063e-07, + "loss": 0.4597, + "step": 17674 + }, + { + "epoch": 
0.7293471981513576, + "grad_norm": 3.3202007358814165, + "learning_rate": 5.403650469259192e-07, + "loss": 0.5282, + "step": 17675 + }, + { + "epoch": 0.7293884624907155, + "grad_norm": 4.716707120570759, + "learning_rate": 5.40210974140513e-07, + "loss": 0.5296, + "step": 17676 + }, + { + "epoch": 0.7294297268300735, + "grad_norm": 2.3975308139259384, + "learning_rate": 5.400569184993405e-07, + "loss": 0.4847, + "step": 17677 + }, + { + "epoch": 0.7294709911694314, + "grad_norm": 3.1571767251813516, + "learning_rate": 5.399028800051521e-07, + "loss": 0.5822, + "step": 17678 + }, + { + "epoch": 0.7295122555087893, + "grad_norm": 1.9160375876293285, + "learning_rate": 5.397488586607001e-07, + "loss": 0.506, + "step": 17679 + }, + { + "epoch": 0.7295535198481472, + "grad_norm": 3.186366207654355, + "learning_rate": 5.395948544687358e-07, + "loss": 0.5717, + "step": 17680 + }, + { + "epoch": 0.7295947841875051, + "grad_norm": 7.533088728284386, + "learning_rate": 5.394408674320104e-07, + "loss": 0.5333, + "step": 17681 + }, + { + "epoch": 0.729636048526863, + "grad_norm": 3.690958100218334, + "learning_rate": 5.39286897553274e-07, + "loss": 0.5524, + "step": 17682 + }, + { + "epoch": 0.7296773128662211, + "grad_norm": 2.762090322496207, + "learning_rate": 5.391329448352765e-07, + "loss": 0.5839, + "step": 17683 + }, + { + "epoch": 0.729718577205579, + "grad_norm": 4.975628137100117, + "learning_rate": 5.389790092807684e-07, + "loss": 0.5443, + "step": 17684 + }, + { + "epoch": 0.7297598415449369, + "grad_norm": 3.2722594133839866, + "learning_rate": 5.388250908924997e-07, + "loss": 0.4535, + "step": 17685 + }, + { + "epoch": 0.7298011058842948, + "grad_norm": 5.503229500753011, + "learning_rate": 5.386711896732188e-07, + "loss": 0.5202, + "step": 17686 + }, + { + "epoch": 0.7298423702236527, + "grad_norm": 3.225652359207988, + "learning_rate": 5.385173056256755e-07, + "loss": 0.5147, + "step": 17687 + }, + { + "epoch": 0.7298836345630106, + "grad_norm": 2.451996704692951, + "learning_rate": 5.383634387526182e-07, + "loss": 0.5385, + "step": 17688 + }, + { + "epoch": 0.7299248989023686, + "grad_norm": 3.1011729426898973, + "learning_rate": 5.382095890567961e-07, + "loss": 0.4892, + "step": 17689 + }, + { + "epoch": 0.7299661632417265, + "grad_norm": 3.70593016939313, + "learning_rate": 5.380557565409563e-07, + "loss": 0.4497, + "step": 17690 + }, + { + "epoch": 0.7300074275810844, + "grad_norm": 2.1656316696409412, + "learning_rate": 5.379019412078472e-07, + "loss": 0.4777, + "step": 17691 + }, + { + "epoch": 0.7300486919204423, + "grad_norm": 3.158128565101079, + "learning_rate": 5.377481430602161e-07, + "loss": 0.4965, + "step": 17692 + }, + { + "epoch": 0.7300899562598003, + "grad_norm": 3.2766086139377855, + "learning_rate": 5.375943621008109e-07, + "loss": 0.5726, + "step": 17693 + }, + { + "epoch": 0.7301312205991582, + "grad_norm": 2.7489642930899256, + "learning_rate": 5.374405983323779e-07, + "loss": 0.5534, + "step": 17694 + }, + { + "epoch": 0.7301724849385162, + "grad_norm": 3.1614712206782407, + "learning_rate": 5.372868517576632e-07, + "loss": 0.509, + "step": 17695 + }, + { + "epoch": 0.7302137492778741, + "grad_norm": 3.563695299361864, + "learning_rate": 5.371331223794139e-07, + "loss": 0.4711, + "step": 17696 + }, + { + "epoch": 0.730255013617232, + "grad_norm": 9.454455704405856, + "learning_rate": 5.369794102003756e-07, + "loss": 0.4772, + "step": 17697 + }, + { + "epoch": 0.7302962779565899, + "grad_norm": 4.35508749028463, + "learning_rate": 5.368257152232948e-07, + 
"loss": 0.5362, + "step": 17698 + }, + { + "epoch": 0.7303375422959478, + "grad_norm": 3.0347692635028958, + "learning_rate": 5.366720374509154e-07, + "loss": 0.4753, + "step": 17699 + }, + { + "epoch": 0.7303788066353057, + "grad_norm": 4.682867708834974, + "learning_rate": 5.365183768859837e-07, + "loss": 0.5275, + "step": 17700 + }, + { + "epoch": 0.7304200709746637, + "grad_norm": 3.4474062066371447, + "learning_rate": 5.363647335312437e-07, + "loss": 0.5196, + "step": 17701 + }, + { + "epoch": 0.7304613353140216, + "grad_norm": 2.8670759937777848, + "learning_rate": 5.36211107389441e-07, + "loss": 0.5079, + "step": 17702 + }, + { + "epoch": 0.7305025996533796, + "grad_norm": 2.006208318844465, + "learning_rate": 5.360574984633182e-07, + "loss": 0.5159, + "step": 17703 + }, + { + "epoch": 0.7305438639927375, + "grad_norm": 2.2697639205904157, + "learning_rate": 5.3590390675562e-07, + "loss": 0.4695, + "step": 17704 + }, + { + "epoch": 0.7305851283320954, + "grad_norm": 6.024266569055512, + "learning_rate": 5.357503322690902e-07, + "loss": 0.523, + "step": 17705 + }, + { + "epoch": 0.7306263926714534, + "grad_norm": 8.87820785222157, + "learning_rate": 5.355967750064714e-07, + "loss": 0.4649, + "step": 17706 + }, + { + "epoch": 0.7306676570108113, + "grad_norm": 3.605495448962002, + "learning_rate": 5.354432349705071e-07, + "loss": 0.492, + "step": 17707 + }, + { + "epoch": 0.7307089213501692, + "grad_norm": 10.329068064185764, + "learning_rate": 5.352897121639391e-07, + "loss": 0.516, + "step": 17708 + }, + { + "epoch": 0.7307501856895271, + "grad_norm": 4.955216083100088, + "learning_rate": 5.351362065895103e-07, + "loss": 0.5238, + "step": 17709 + }, + { + "epoch": 0.730791450028885, + "grad_norm": 4.266132375713457, + "learning_rate": 5.349827182499629e-07, + "loss": 0.5224, + "step": 17710 + }, + { + "epoch": 0.7308327143682429, + "grad_norm": 2.2393522122938956, + "learning_rate": 5.348292471480377e-07, + "loss": 0.428, + "step": 17711 + }, + { + "epoch": 0.7308739787076008, + "grad_norm": 1.7041048825346155, + "learning_rate": 5.346757932864767e-07, + "loss": 0.4739, + "step": 17712 + }, + { + "epoch": 0.7309152430469589, + "grad_norm": 3.335403933228558, + "learning_rate": 5.345223566680209e-07, + "loss": 0.5115, + "step": 17713 + }, + { + "epoch": 0.7309565073863168, + "grad_norm": 2.9459126251047776, + "learning_rate": 5.343689372954117e-07, + "loss": 0.528, + "step": 17714 + }, + { + "epoch": 0.7309977717256747, + "grad_norm": 3.102904740991285, + "learning_rate": 5.342155351713883e-07, + "loss": 0.4758, + "step": 17715 + }, + { + "epoch": 0.7310390360650326, + "grad_norm": 8.779772264954188, + "learning_rate": 5.340621502986915e-07, + "loss": 0.496, + "step": 17716 + }, + { + "epoch": 0.7310803004043905, + "grad_norm": 2.6781325763364574, + "learning_rate": 5.33908782680061e-07, + "loss": 0.4817, + "step": 17717 + }, + { + "epoch": 0.7311215647437485, + "grad_norm": 3.346839577547617, + "learning_rate": 5.337554323182371e-07, + "loss": 0.4878, + "step": 17718 + }, + { + "epoch": 0.7311628290831064, + "grad_norm": 3.606908043216435, + "learning_rate": 5.336020992159578e-07, + "loss": 0.4846, + "step": 17719 + }, + { + "epoch": 0.7312040934224643, + "grad_norm": 3.3433496558135203, + "learning_rate": 5.334487833759631e-07, + "loss": 0.5508, + "step": 17720 + }, + { + "epoch": 0.7312453577618222, + "grad_norm": 2.67716860165107, + "learning_rate": 5.332954848009904e-07, + "loss": 0.56, + "step": 17721 + }, + { + "epoch": 0.7312866221011801, + "grad_norm": 3.0297854830573323, + 
"learning_rate": 5.331422034937793e-07, + "loss": 0.5161, + "step": 17722 + }, + { + "epoch": 0.731327886440538, + "grad_norm": 4.574406362751547, + "learning_rate": 5.329889394570667e-07, + "loss": 0.4958, + "step": 17723 + }, + { + "epoch": 0.7313691507798961, + "grad_norm": 2.9358144580376146, + "learning_rate": 5.328356926935907e-07, + "loss": 0.4964, + "step": 17724 + }, + { + "epoch": 0.731410415119254, + "grad_norm": 5.123906049256554, + "learning_rate": 5.326824632060886e-07, + "loss": 0.5635, + "step": 17725 + }, + { + "epoch": 0.7314516794586119, + "grad_norm": 9.586381514436487, + "learning_rate": 5.325292509972982e-07, + "loss": 0.522, + "step": 17726 + }, + { + "epoch": 0.7314929437979698, + "grad_norm": 3.978351804765101, + "learning_rate": 5.32376056069955e-07, + "loss": 0.4437, + "step": 17727 + }, + { + "epoch": 0.7315342081373277, + "grad_norm": 2.3439871415377493, + "learning_rate": 5.32222878426796e-07, + "loss": 0.5215, + "step": 17728 + }, + { + "epoch": 0.7315754724766856, + "grad_norm": 7.471410306291803, + "learning_rate": 5.320697180705575e-07, + "loss": 0.5255, + "step": 17729 + }, + { + "epoch": 0.7316167368160436, + "grad_norm": 2.2959635051211356, + "learning_rate": 5.319165750039759e-07, + "loss": 0.498, + "step": 17730 + }, + { + "epoch": 0.7316580011554015, + "grad_norm": 2.216518866993799, + "learning_rate": 5.317634492297852e-07, + "loss": 0.5386, + "step": 17731 + }, + { + "epoch": 0.7316992654947594, + "grad_norm": 4.309107787070628, + "learning_rate": 5.316103407507218e-07, + "loss": 0.5219, + "step": 17732 + }, + { + "epoch": 0.7317405298341173, + "grad_norm": 2.848842945685501, + "learning_rate": 5.3145724956952e-07, + "loss": 0.5311, + "step": 17733 + }, + { + "epoch": 0.7317817941734753, + "grad_norm": 5.456914247788195, + "learning_rate": 5.313041756889153e-07, + "loss": 0.5008, + "step": 17734 + }, + { + "epoch": 0.7318230585128332, + "grad_norm": 3.2220148088569993, + "learning_rate": 5.311511191116413e-07, + "loss": 0.5253, + "step": 17735 + }, + { + "epoch": 0.7318643228521912, + "grad_norm": 2.853958828218082, + "learning_rate": 5.309980798404315e-07, + "loss": 0.558, + "step": 17736 + }, + { + "epoch": 0.7319055871915491, + "grad_norm": 2.1603975231137507, + "learning_rate": 5.3084505787802e-07, + "loss": 0.4549, + "step": 17737 + }, + { + "epoch": 0.731946851530907, + "grad_norm": 2.545154068407005, + "learning_rate": 5.306920532271401e-07, + "loss": 0.5401, + "step": 17738 + }, + { + "epoch": 0.7319881158702649, + "grad_norm": 8.391293795833153, + "learning_rate": 5.305390658905257e-07, + "loss": 0.5177, + "step": 17739 + }, + { + "epoch": 0.7320293802096228, + "grad_norm": 3.5291613630822884, + "learning_rate": 5.303860958709082e-07, + "loss": 0.5008, + "step": 17740 + }, + { + "epoch": 0.7320706445489807, + "grad_norm": 2.9330667990276686, + "learning_rate": 5.302331431710207e-07, + "loss": 0.5019, + "step": 17741 + }, + { + "epoch": 0.7321119088883387, + "grad_norm": 4.3187351237257845, + "learning_rate": 5.300802077935957e-07, + "loss": 0.4993, + "step": 17742 + }, + { + "epoch": 0.7321531732276966, + "grad_norm": 7.843397816266014, + "learning_rate": 5.29927289741364e-07, + "loss": 0.4766, + "step": 17743 + }, + { + "epoch": 0.7321944375670546, + "grad_norm": 6.457375257375908, + "learning_rate": 5.297743890170577e-07, + "loss": 0.5371, + "step": 17744 + }, + { + "epoch": 0.7322357019064125, + "grad_norm": 25.9070328398435, + "learning_rate": 5.29621505623408e-07, + "loss": 0.4777, + "step": 17745 + }, + { + "epoch": 
0.7322769662457704, + "grad_norm": 2.098556100917274, + "learning_rate": 5.294686395631461e-07, + "loss": 0.4987, + "step": 17746 + }, + { + "epoch": 0.7323182305851283, + "grad_norm": 3.133965907715403, + "learning_rate": 5.293157908390024e-07, + "loss": 0.5124, + "step": 17747 + }, + { + "epoch": 0.7323594949244863, + "grad_norm": 2.7067991845339963, + "learning_rate": 5.291629594537062e-07, + "loss": 0.5147, + "step": 17748 + }, + { + "epoch": 0.7324007592638442, + "grad_norm": 3.0673893862399972, + "learning_rate": 5.290101454099883e-07, + "loss": 0.52, + "step": 17749 + }, + { + "epoch": 0.7324420236032021, + "grad_norm": 12.080842794940738, + "learning_rate": 5.288573487105785e-07, + "loss": 0.5206, + "step": 17750 + }, + { + "epoch": 0.73248328794256, + "grad_norm": 2.8726527960343415, + "learning_rate": 5.287045693582061e-07, + "loss": 0.5502, + "step": 17751 + }, + { + "epoch": 0.7325245522819179, + "grad_norm": 2.432873319236821, + "learning_rate": 5.285518073555995e-07, + "loss": 0.4857, + "step": 17752 + }, + { + "epoch": 0.7325658166212758, + "grad_norm": 3.2948793517635155, + "learning_rate": 5.283990627054876e-07, + "loss": 0.5133, + "step": 17753 + }, + { + "epoch": 0.7326070809606339, + "grad_norm": 10.147012084249774, + "learning_rate": 5.282463354105991e-07, + "loss": 0.5632, + "step": 17754 + }, + { + "epoch": 0.7326483452999918, + "grad_norm": 338.96704145380966, + "learning_rate": 5.280936254736627e-07, + "loss": 0.4561, + "step": 17755 + }, + { + "epoch": 0.7326896096393497, + "grad_norm": 4.303515275376883, + "learning_rate": 5.279409328974049e-07, + "loss": 0.4422, + "step": 17756 + }, + { + "epoch": 0.7327308739787076, + "grad_norm": 7.741957857648072, + "learning_rate": 5.277882576845537e-07, + "loss": 0.4702, + "step": 17757 + }, + { + "epoch": 0.7327721383180655, + "grad_norm": 42.76816927959384, + "learning_rate": 5.276355998378365e-07, + "loss": 0.535, + "step": 17758 + }, + { + "epoch": 0.7328134026574235, + "grad_norm": 9.77673693355699, + "learning_rate": 5.274829593599803e-07, + "loss": 0.5834, + "step": 17759 + }, + { + "epoch": 0.7328546669967814, + "grad_norm": 2.3836406085694017, + "learning_rate": 5.273303362537113e-07, + "loss": 0.5074, + "step": 17760 + }, + { + "epoch": 0.7328959313361393, + "grad_norm": 3.0816524714842854, + "learning_rate": 5.271777305217553e-07, + "loss": 0.5407, + "step": 17761 + }, + { + "epoch": 0.7329371956754972, + "grad_norm": 14.02213717734184, + "learning_rate": 5.270251421668385e-07, + "loss": 0.5267, + "step": 17762 + }, + { + "epoch": 0.7329784600148551, + "grad_norm": 7.766729628136969, + "learning_rate": 5.268725711916875e-07, + "loss": 0.5215, + "step": 17763 + }, + { + "epoch": 0.7330197243542131, + "grad_norm": 3.5869166083894437, + "learning_rate": 5.26720017599026e-07, + "loss": 0.5411, + "step": 17764 + }, + { + "epoch": 0.7330609886935711, + "grad_norm": 1.974319569771685, + "learning_rate": 5.265674813915798e-07, + "loss": 0.4531, + "step": 17765 + }, + { + "epoch": 0.733102253032929, + "grad_norm": 3.1794253264337393, + "learning_rate": 5.264149625720737e-07, + "loss": 0.5284, + "step": 17766 + }, + { + "epoch": 0.7331435173722869, + "grad_norm": 2.9501637349659946, + "learning_rate": 5.262624611432321e-07, + "loss": 0.5213, + "step": 17767 + }, + { + "epoch": 0.7331847817116448, + "grad_norm": 2.021037430759579, + "learning_rate": 5.261099771077786e-07, + "loss": 0.5257, + "step": 17768 + }, + { + "epoch": 0.7332260460510027, + "grad_norm": 4.206434548786692, + "learning_rate": 5.259575104684372e-07, + 
"loss": 0.5274, + "step": 17769 + }, + { + "epoch": 0.7332673103903606, + "grad_norm": 5.511306407968382, + "learning_rate": 5.258050612279313e-07, + "loss": 0.5378, + "step": 17770 + }, + { + "epoch": 0.7333085747297186, + "grad_norm": 2.322009328039488, + "learning_rate": 5.256526293889846e-07, + "loss": 0.507, + "step": 17771 + }, + { + "epoch": 0.7333498390690765, + "grad_norm": 2.633289087392332, + "learning_rate": 5.255002149543188e-07, + "loss": 0.4762, + "step": 17772 + }, + { + "epoch": 0.7333911034084344, + "grad_norm": 6.083897545707466, + "learning_rate": 5.253478179266576e-07, + "loss": 0.5514, + "step": 17773 + }, + { + "epoch": 0.7334323677477924, + "grad_norm": 3.229562656674728, + "learning_rate": 5.25195438308722e-07, + "loss": 0.5392, + "step": 17774 + }, + { + "epoch": 0.7334736320871503, + "grad_norm": 5.423268906793724, + "learning_rate": 5.250430761032344e-07, + "loss": 0.5688, + "step": 17775 + }, + { + "epoch": 0.7335148964265082, + "grad_norm": 2.269467479188602, + "learning_rate": 5.248907313129169e-07, + "loss": 0.5117, + "step": 17776 + }, + { + "epoch": 0.7335561607658662, + "grad_norm": 6.564216413183712, + "learning_rate": 5.2473840394049e-07, + "loss": 0.4917, + "step": 17777 + }, + { + "epoch": 0.7335974251052241, + "grad_norm": 1.994665698342306, + "learning_rate": 5.245860939886749e-07, + "loss": 0.4912, + "step": 17778 + }, + { + "epoch": 0.733638689444582, + "grad_norm": 4.925479953609691, + "learning_rate": 5.244338014601922e-07, + "loss": 0.5642, + "step": 17779 + }, + { + "epoch": 0.7336799537839399, + "grad_norm": 2.332817560929618, + "learning_rate": 5.242815263577628e-07, + "loss": 0.5205, + "step": 17780 + }, + { + "epoch": 0.7337212181232978, + "grad_norm": 6.483661847691828, + "learning_rate": 5.241292686841056e-07, + "loss": 0.4991, + "step": 17781 + }, + { + "epoch": 0.7337624824626557, + "grad_norm": 4.6978014191380035, + "learning_rate": 5.23977028441941e-07, + "loss": 0.5193, + "step": 17782 + }, + { + "epoch": 0.7338037468020137, + "grad_norm": 4.462292281641714, + "learning_rate": 5.23824805633989e-07, + "loss": 0.53, + "step": 17783 + }, + { + "epoch": 0.7338450111413716, + "grad_norm": 2.634921108185806, + "learning_rate": 5.236726002629672e-07, + "loss": 0.5031, + "step": 17784 + }, + { + "epoch": 0.7338862754807296, + "grad_norm": 2.3573520337309457, + "learning_rate": 5.235204123315953e-07, + "loss": 0.5044, + "step": 17785 + }, + { + "epoch": 0.7339275398200875, + "grad_norm": 2.4036497404958856, + "learning_rate": 5.233682418425921e-07, + "loss": 0.563, + "step": 17786 + }, + { + "epoch": 0.7339688041594454, + "grad_norm": 3.187234891042905, + "learning_rate": 5.232160887986748e-07, + "loss": 0.5509, + "step": 17787 + }, + { + "epoch": 0.7340100684988033, + "grad_norm": 2.9070072147129733, + "learning_rate": 5.23063953202562e-07, + "loss": 0.5301, + "step": 17788 + }, + { + "epoch": 0.7340513328381613, + "grad_norm": 4.605970283376477, + "learning_rate": 5.229118350569706e-07, + "loss": 0.4621, + "step": 17789 + }, + { + "epoch": 0.7340925971775192, + "grad_norm": 1.8531022151998697, + "learning_rate": 5.227597343646181e-07, + "loss": 0.4271, + "step": 17790 + }, + { + "epoch": 0.7341338615168771, + "grad_norm": 2.4940575596629664, + "learning_rate": 5.226076511282214e-07, + "loss": 0.47, + "step": 17791 + }, + { + "epoch": 0.734175125856235, + "grad_norm": 3.0096812569441505, + "learning_rate": 5.224555853504977e-07, + "loss": 0.4879, + "step": 17792 + }, + { + "epoch": 0.7342163901955929, + "grad_norm": 4.431115121524322, + 
"learning_rate": 5.223035370341622e-07, + "loss": 0.506, + "step": 17793 + }, + { + "epoch": 0.7342576545349508, + "grad_norm": 2.547342140974912, + "learning_rate": 5.221515061819311e-07, + "loss": 0.4375, + "step": 17794 + }, + { + "epoch": 0.7342989188743089, + "grad_norm": 2.6360725662954057, + "learning_rate": 5.219994927965206e-07, + "loss": 0.518, + "step": 17795 + }, + { + "epoch": 0.7343401832136668, + "grad_norm": 3.8674561276485395, + "learning_rate": 5.21847496880646e-07, + "loss": 0.4873, + "step": 17796 + }, + { + "epoch": 0.7343814475530247, + "grad_norm": 8.733726810630218, + "learning_rate": 5.216955184370217e-07, + "loss": 0.4712, + "step": 17797 + }, + { + "epoch": 0.7344227118923826, + "grad_norm": 4.118503556522587, + "learning_rate": 5.215435574683627e-07, + "loss": 0.5652, + "step": 17798 + }, + { + "epoch": 0.7344639762317405, + "grad_norm": 4.944994357659381, + "learning_rate": 5.213916139773841e-07, + "loss": 0.503, + "step": 17799 + }, + { + "epoch": 0.7345052405710984, + "grad_norm": 5.675414375450074, + "learning_rate": 5.212396879667993e-07, + "loss": 0.5221, + "step": 17800 + }, + { + "epoch": 0.7345465049104564, + "grad_norm": 2.034979006097952, + "learning_rate": 5.210877794393217e-07, + "loss": 0.5252, + "step": 17801 + }, + { + "epoch": 0.7345877692498143, + "grad_norm": 2.5884012335363096, + "learning_rate": 5.209358883976653e-07, + "loss": 0.518, + "step": 17802 + }, + { + "epoch": 0.7346290335891722, + "grad_norm": 2.907605860504898, + "learning_rate": 5.20784014844543e-07, + "loss": 0.4825, + "step": 17803 + }, + { + "epoch": 0.7346702979285301, + "grad_norm": 3.070503707047858, + "learning_rate": 5.206321587826684e-07, + "loss": 0.5033, + "step": 17804 + }, + { + "epoch": 0.7347115622678881, + "grad_norm": 4.588456571023903, + "learning_rate": 5.204803202147529e-07, + "loss": 0.5181, + "step": 17805 + }, + { + "epoch": 0.7347528266072461, + "grad_norm": 3.4776876890099193, + "learning_rate": 5.203284991435093e-07, + "loss": 0.5923, + "step": 17806 + }, + { + "epoch": 0.734794090946604, + "grad_norm": 3.670530781676713, + "learning_rate": 5.201766955716495e-07, + "loss": 0.4774, + "step": 17807 + }, + { + "epoch": 0.7348353552859619, + "grad_norm": 2.6542789035835668, + "learning_rate": 5.200249095018855e-07, + "loss": 0.4992, + "step": 17808 + }, + { + "epoch": 0.7348766196253198, + "grad_norm": 2.4473339856762513, + "learning_rate": 5.198731409369278e-07, + "loss": 0.506, + "step": 17809 + }, + { + "epoch": 0.7349178839646777, + "grad_norm": 7.492245159708519, + "learning_rate": 5.197213898794875e-07, + "loss": 0.5406, + "step": 17810 + }, + { + "epoch": 0.7349591483040356, + "grad_norm": 2.583300662810117, + "learning_rate": 5.195696563322755e-07, + "loss": 0.4612, + "step": 17811 + }, + { + "epoch": 0.7350004126433936, + "grad_norm": 10.358842248006095, + "learning_rate": 5.194179402980026e-07, + "loss": 0.5531, + "step": 17812 + }, + { + "epoch": 0.7350416769827515, + "grad_norm": 3.57991115823451, + "learning_rate": 5.192662417793784e-07, + "loss": 0.5493, + "step": 17813 + }, + { + "epoch": 0.7350829413221094, + "grad_norm": 1.8369796375256846, + "learning_rate": 5.19114560779112e-07, + "loss": 0.4808, + "step": 17814 + }, + { + "epoch": 0.7351242056614674, + "grad_norm": 4.7076958900413555, + "learning_rate": 5.189628972999134e-07, + "loss": 0.4577, + "step": 17815 + }, + { + "epoch": 0.7351654700008253, + "grad_norm": 2.689004191066361, + "learning_rate": 5.188112513444915e-07, + "loss": 0.5412, + "step": 17816 + }, + { + "epoch": 
0.7352067343401832, + "grad_norm": 2.971646785324917, + "learning_rate": 5.186596229155558e-07, + "loss": 0.5227, + "step": 17817 + }, + { + "epoch": 0.7352479986795412, + "grad_norm": 2.482541568124734, + "learning_rate": 5.185080120158136e-07, + "loss": 0.5149, + "step": 17818 + }, + { + "epoch": 0.7352892630188991, + "grad_norm": 2.2438704103787455, + "learning_rate": 5.183564186479738e-07, + "loss": 0.5225, + "step": 17819 + }, + { + "epoch": 0.735330527358257, + "grad_norm": 4.117007107063772, + "learning_rate": 5.182048428147439e-07, + "loss": 0.5147, + "step": 17820 + }, + { + "epoch": 0.7353717916976149, + "grad_norm": 2.8673462019630613, + "learning_rate": 5.180532845188321e-07, + "loss": 0.5306, + "step": 17821 + }, + { + "epoch": 0.7354130560369728, + "grad_norm": 6.4578914446253055, + "learning_rate": 5.179017437629446e-07, + "loss": 0.4844, + "step": 17822 + }, + { + "epoch": 0.7354543203763307, + "grad_norm": 2.843150200688882, + "learning_rate": 5.177502205497889e-07, + "loss": 0.5449, + "step": 17823 + }, + { + "epoch": 0.7354955847156887, + "grad_norm": 35.91541993374764, + "learning_rate": 5.175987148820719e-07, + "loss": 0.488, + "step": 17824 + }, + { + "epoch": 0.7355368490550467, + "grad_norm": 5.3077643755275385, + "learning_rate": 5.174472267624989e-07, + "loss": 0.5603, + "step": 17825 + }, + { + "epoch": 0.7355781133944046, + "grad_norm": 3.3006527621255803, + "learning_rate": 5.172957561937769e-07, + "loss": 0.4659, + "step": 17826 + }, + { + "epoch": 0.7356193777337625, + "grad_norm": 3.5738278374485626, + "learning_rate": 5.171443031786105e-07, + "loss": 0.5392, + "step": 17827 + }, + { + "epoch": 0.7356606420731204, + "grad_norm": 5.859719012554108, + "learning_rate": 5.169928677197056e-07, + "loss": 0.4848, + "step": 17828 + }, + { + "epoch": 0.7357019064124783, + "grad_norm": 4.332818216922888, + "learning_rate": 5.168414498197679e-07, + "loss": 0.4649, + "step": 17829 + }, + { + "epoch": 0.7357431707518363, + "grad_norm": 10.039252036156716, + "learning_rate": 5.166900494815007e-07, + "loss": 0.5439, + "step": 17830 + }, + { + "epoch": 0.7357844350911942, + "grad_norm": 6.146917317777505, + "learning_rate": 5.16538666707609e-07, + "loss": 0.4971, + "step": 17831 + }, + { + "epoch": 0.7358256994305521, + "grad_norm": 3.116155088707073, + "learning_rate": 5.163873015007971e-07, + "loss": 0.5044, + "step": 17832 + }, + { + "epoch": 0.73586696376991, + "grad_norm": 6.001864621428631, + "learning_rate": 5.162359538637692e-07, + "loss": 0.5563, + "step": 17833 + }, + { + "epoch": 0.7359082281092679, + "grad_norm": 2.825351636851099, + "learning_rate": 5.160846237992275e-07, + "loss": 0.5076, + "step": 17834 + }, + { + "epoch": 0.735949492448626, + "grad_norm": 2.2455020134126378, + "learning_rate": 5.159333113098758e-07, + "loss": 0.4734, + "step": 17835 + }, + { + "epoch": 0.7359907567879839, + "grad_norm": 2.2668301066329413, + "learning_rate": 5.15782016398417e-07, + "loss": 0.5321, + "step": 17836 + }, + { + "epoch": 0.7360320211273418, + "grad_norm": 3.1433968515193405, + "learning_rate": 5.156307390675538e-07, + "loss": 0.5088, + "step": 17837 + }, + { + "epoch": 0.7360732854666997, + "grad_norm": 5.732014939787121, + "learning_rate": 5.15479479319988e-07, + "loss": 0.5361, + "step": 17838 + }, + { + "epoch": 0.7361145498060576, + "grad_norm": 3.315872066860562, + "learning_rate": 5.153282371584218e-07, + "loss": 0.4491, + "step": 17839 + }, + { + "epoch": 0.7361558141454155, + "grad_norm": 2.135130319946828, + "learning_rate": 5.151770125855561e-07, + 
"loss": 0.4792, + "step": 17840 + }, + { + "epoch": 0.7361970784847734, + "grad_norm": 1.9125636413112375, + "learning_rate": 5.150258056040932e-07, + "loss": 0.5151, + "step": 17841 + }, + { + "epoch": 0.7362383428241314, + "grad_norm": 2.342936316678821, + "learning_rate": 5.148746162167329e-07, + "loss": 0.5382, + "step": 17842 + }, + { + "epoch": 0.7362796071634893, + "grad_norm": 4.715285361593035, + "learning_rate": 5.147234444261763e-07, + "loss": 0.5396, + "step": 17843 + }, + { + "epoch": 0.7363208715028472, + "grad_norm": 5.499052527283797, + "learning_rate": 5.145722902351238e-07, + "loss": 0.526, + "step": 17844 + }, + { + "epoch": 0.7363621358422051, + "grad_norm": 2.341859561022538, + "learning_rate": 5.144211536462759e-07, + "loss": 0.4737, + "step": 17845 + }, + { + "epoch": 0.7364034001815631, + "grad_norm": 2.3255380738764813, + "learning_rate": 5.142700346623313e-07, + "loss": 0.5355, + "step": 17846 + }, + { + "epoch": 0.736444664520921, + "grad_norm": 2.248039674699051, + "learning_rate": 5.141189332859897e-07, + "loss": 0.5186, + "step": 17847 + }, + { + "epoch": 0.736485928860279, + "grad_norm": 2.5149994912787994, + "learning_rate": 5.139678495199502e-07, + "loss": 0.4811, + "step": 17848 + }, + { + "epoch": 0.7365271931996369, + "grad_norm": 2.611229804560084, + "learning_rate": 5.138167833669121e-07, + "loss": 0.4874, + "step": 17849 + }, + { + "epoch": 0.7365684575389948, + "grad_norm": 3.1696704580284134, + "learning_rate": 5.136657348295729e-07, + "loss": 0.4984, + "step": 17850 + }, + { + "epoch": 0.7366097218783527, + "grad_norm": 2.1761085072064588, + "learning_rate": 5.135147039106312e-07, + "loss": 0.4947, + "step": 17851 + }, + { + "epoch": 0.7366509862177106, + "grad_norm": 2.384052988546525, + "learning_rate": 5.133636906127849e-07, + "loss": 0.5453, + "step": 17852 + }, + { + "epoch": 0.7366922505570686, + "grad_norm": 13.20997017198776, + "learning_rate": 5.13212694938731e-07, + "loss": 0.487, + "step": 17853 + }, + { + "epoch": 0.7367335148964265, + "grad_norm": 3.7368413859136496, + "learning_rate": 5.130617168911672e-07, + "loss": 0.5061, + "step": 17854 + }, + { + "epoch": 0.7367747792357844, + "grad_norm": 17.310833240143182, + "learning_rate": 5.129107564727898e-07, + "loss": 0.5228, + "step": 17855 + }, + { + "epoch": 0.7368160435751424, + "grad_norm": 3.882563638699296, + "learning_rate": 5.127598136862956e-07, + "loss": 0.5032, + "step": 17856 + }, + { + "epoch": 0.7368573079145003, + "grad_norm": 2.984300020050467, + "learning_rate": 5.126088885343807e-07, + "loss": 0.5138, + "step": 17857 + }, + { + "epoch": 0.7368985722538582, + "grad_norm": 6.267073060273939, + "learning_rate": 5.124579810197418e-07, + "loss": 0.5604, + "step": 17858 + }, + { + "epoch": 0.7369398365932162, + "grad_norm": 3.0657285609048563, + "learning_rate": 5.123070911450732e-07, + "loss": 0.463, + "step": 17859 + }, + { + "epoch": 0.7369811009325741, + "grad_norm": 3.670544068205869, + "learning_rate": 5.121562189130708e-07, + "loss": 0.5435, + "step": 17860 + }, + { + "epoch": 0.737022365271932, + "grad_norm": 3.2236714774631836, + "learning_rate": 5.1200536432643e-07, + "loss": 0.4881, + "step": 17861 + }, + { + "epoch": 0.7370636296112899, + "grad_norm": 4.694100415391246, + "learning_rate": 5.118545273878443e-07, + "loss": 0.5146, + "step": 17862 + }, + { + "epoch": 0.7371048939506478, + "grad_norm": 8.197106863088285, + "learning_rate": 5.11703708100009e-07, + "loss": 0.5742, + "step": 17863 + }, + { + "epoch": 0.7371461582900057, + "grad_norm": 2.521631560114745, 
+ "learning_rate": 5.115529064656177e-07, + "loss": 0.5148, + "step": 17864 + }, + { + "epoch": 0.7371874226293637, + "grad_norm": 1.9398026695463004, + "learning_rate": 5.114021224873647e-07, + "loss": 0.4954, + "step": 17865 + }, + { + "epoch": 0.7372286869687217, + "grad_norm": 5.0055158143673895, + "learning_rate": 5.112513561679429e-07, + "loss": 0.5279, + "step": 17866 + }, + { + "epoch": 0.7372699513080796, + "grad_norm": 2.3764080356531347, + "learning_rate": 5.11100607510045e-07, + "loss": 0.4896, + "step": 17867 + }, + { + "epoch": 0.7373112156474375, + "grad_norm": 2.5725350828699614, + "learning_rate": 5.109498765163639e-07, + "loss": 0.5164, + "step": 17868 + }, + { + "epoch": 0.7373524799867954, + "grad_norm": 4.565806072074164, + "learning_rate": 5.107991631895922e-07, + "loss": 0.584, + "step": 17869 + }, + { + "epoch": 0.7373937443261533, + "grad_norm": 4.25388793657548, + "learning_rate": 5.106484675324226e-07, + "loss": 0.5683, + "step": 17870 + }, + { + "epoch": 0.7374350086655113, + "grad_norm": 10.510146340098494, + "learning_rate": 5.104977895475459e-07, + "loss": 0.4911, + "step": 17871 + }, + { + "epoch": 0.7374762730048692, + "grad_norm": 8.445457849099865, + "learning_rate": 5.103471292376539e-07, + "loss": 0.5198, + "step": 17872 + }, + { + "epoch": 0.7375175373442271, + "grad_norm": 2.2417927983233517, + "learning_rate": 5.101964866054381e-07, + "loss": 0.52, + "step": 17873 + }, + { + "epoch": 0.737558801683585, + "grad_norm": 2.56671425239543, + "learning_rate": 5.100458616535896e-07, + "loss": 0.495, + "step": 17874 + }, + { + "epoch": 0.7376000660229429, + "grad_norm": 2.345295970551788, + "learning_rate": 5.09895254384798e-07, + "loss": 0.4508, + "step": 17875 + }, + { + "epoch": 0.737641330362301, + "grad_norm": 4.50298531419152, + "learning_rate": 5.097446648017539e-07, + "loss": 0.4897, + "step": 17876 + }, + { + "epoch": 0.7376825947016589, + "grad_norm": 2.9590903203142886, + "learning_rate": 5.095940929071475e-07, + "loss": 0.5062, + "step": 17877 + }, + { + "epoch": 0.7377238590410168, + "grad_norm": 2.9101041144651707, + "learning_rate": 5.09443538703669e-07, + "loss": 0.4625, + "step": 17878 + }, + { + "epoch": 0.7377651233803747, + "grad_norm": 3.1393954886859294, + "learning_rate": 5.092930021940061e-07, + "loss": 0.5226, + "step": 17879 + }, + { + "epoch": 0.7378063877197326, + "grad_norm": 2.831366005758987, + "learning_rate": 5.091424833808484e-07, + "loss": 0.4976, + "step": 17880 + }, + { + "epoch": 0.7378476520590905, + "grad_norm": 2.925225928777325, + "learning_rate": 5.089919822668848e-07, + "loss": 0.5409, + "step": 17881 + }, + { + "epoch": 0.7378889163984484, + "grad_norm": 2.6105876002873467, + "learning_rate": 5.088414988548039e-07, + "loss": 0.4737, + "step": 17882 + }, + { + "epoch": 0.7379301807378064, + "grad_norm": 2.6818436827616665, + "learning_rate": 5.08691033147293e-07, + "loss": 0.5366, + "step": 17883 + }, + { + "epoch": 0.7379714450771643, + "grad_norm": 3.0405031864600414, + "learning_rate": 5.085405851470401e-07, + "loss": 0.5265, + "step": 17884 + }, + { + "epoch": 0.7380127094165222, + "grad_norm": 2.119828308456333, + "learning_rate": 5.083901548567325e-07, + "loss": 0.4877, + "step": 17885 + }, + { + "epoch": 0.7380539737558802, + "grad_norm": 3.523446048334528, + "learning_rate": 5.082397422790579e-07, + "loss": 0.5113, + "step": 17886 + }, + { + "epoch": 0.7380952380952381, + "grad_norm": 3.005073491044464, + "learning_rate": 5.080893474167018e-07, + "loss": 0.522, + "step": 17887 + }, + { + "epoch": 
0.738136502434596, + "grad_norm": 7.737041905591929, + "learning_rate": 5.079389702723516e-07, + "loss": 0.5159, + "step": 17888 + }, + { + "epoch": 0.738177766773954, + "grad_norm": 3.366043907651435, + "learning_rate": 5.077886108486931e-07, + "loss": 0.488, + "step": 17889 + }, + { + "epoch": 0.7382190311133119, + "grad_norm": 5.3594708926221335, + "learning_rate": 5.076382691484128e-07, + "loss": 0.5318, + "step": 17890 + }, + { + "epoch": 0.7382602954526698, + "grad_norm": 2.878555309705292, + "learning_rate": 5.074879451741953e-07, + "loss": 0.4852, + "step": 17891 + }, + { + "epoch": 0.7383015597920277, + "grad_norm": 2.97688995635455, + "learning_rate": 5.073376389287255e-07, + "loss": 0.5103, + "step": 17892 + }, + { + "epoch": 0.7383428241313856, + "grad_norm": 2.0346186651966507, + "learning_rate": 5.071873504146887e-07, + "loss": 0.4911, + "step": 17893 + }, + { + "epoch": 0.7383840884707435, + "grad_norm": 11.768733541905393, + "learning_rate": 5.070370796347696e-07, + "loss": 0.5708, + "step": 17894 + }, + { + "epoch": 0.7384253528101015, + "grad_norm": 8.326310090930964, + "learning_rate": 5.068868265916527e-07, + "loss": 0.5066, + "step": 17895 + }, + { + "epoch": 0.7384666171494595, + "grad_norm": 2.3775331189395805, + "learning_rate": 5.067365912880209e-07, + "loss": 0.5406, + "step": 17896 + }, + { + "epoch": 0.7385078814888174, + "grad_norm": 2.1182380299150445, + "learning_rate": 5.065863737265585e-07, + "loss": 0.5278, + "step": 17897 + }, + { + "epoch": 0.7385491458281753, + "grad_norm": 3.109249168922403, + "learning_rate": 5.064361739099485e-07, + "loss": 0.526, + "step": 17898 + }, + { + "epoch": 0.7385904101675332, + "grad_norm": 3.4209249889741575, + "learning_rate": 5.062859918408744e-07, + "loss": 0.4929, + "step": 17899 + }, + { + "epoch": 0.7386316745068912, + "grad_norm": 2.798211654788514, + "learning_rate": 5.061358275220179e-07, + "loss": 0.5386, + "step": 17900 + }, + { + "epoch": 0.7386729388462491, + "grad_norm": 3.4544861763307018, + "learning_rate": 5.059856809560618e-07, + "loss": 0.5125, + "step": 17901 + }, + { + "epoch": 0.738714203185607, + "grad_norm": 2.671230226306262, + "learning_rate": 5.058355521456887e-07, + "loss": 0.5167, + "step": 17902 + }, + { + "epoch": 0.7387554675249649, + "grad_norm": 6.05327407608969, + "learning_rate": 5.05685441093579e-07, + "loss": 0.5507, + "step": 17903 + }, + { + "epoch": 0.7387967318643228, + "grad_norm": 2.812213522035978, + "learning_rate": 5.055353478024152e-07, + "loss": 0.4768, + "step": 17904 + }, + { + "epoch": 0.7388379962036807, + "grad_norm": 2.4856812741052132, + "learning_rate": 5.053852722748775e-07, + "loss": 0.527, + "step": 17905 + }, + { + "epoch": 0.7388792605430387, + "grad_norm": 4.026057844525751, + "learning_rate": 5.052352145136469e-07, + "loss": 0.5185, + "step": 17906 + }, + { + "epoch": 0.7389205248823967, + "grad_norm": 15.230416285059702, + "learning_rate": 5.050851745214044e-07, + "loss": 0.4952, + "step": 17907 + }, + { + "epoch": 0.7389617892217546, + "grad_norm": 3.058501383494336, + "learning_rate": 5.049351523008292e-07, + "loss": 0.517, + "step": 17908 + }, + { + "epoch": 0.7390030535611125, + "grad_norm": 3.5239828567437845, + "learning_rate": 5.047851478546013e-07, + "loss": 0.5491, + "step": 17909 + }, + { + "epoch": 0.7390443179004704, + "grad_norm": 2.8424906166219563, + "learning_rate": 5.046351611854003e-07, + "loss": 0.5337, + "step": 17910 + }, + { + "epoch": 0.7390855822398283, + "grad_norm": 2.3435174144583923, + "learning_rate": 5.044851922959058e-07, + 
"loss": 0.4922, + "step": 17911 + }, + { + "epoch": 0.7391268465791863, + "grad_norm": 2.6278286152323584, + "learning_rate": 5.043352411887957e-07, + "loss": 0.4784, + "step": 17912 + }, + { + "epoch": 0.7391681109185442, + "grad_norm": 3.221065284995768, + "learning_rate": 5.041853078667491e-07, + "loss": 0.5088, + "step": 17913 + }, + { + "epoch": 0.7392093752579021, + "grad_norm": 5.282457731024019, + "learning_rate": 5.04035392332444e-07, + "loss": 0.4648, + "step": 17914 + }, + { + "epoch": 0.73925063959726, + "grad_norm": 3.292721391797092, + "learning_rate": 5.038854945885588e-07, + "loss": 0.5708, + "step": 17915 + }, + { + "epoch": 0.7392919039366179, + "grad_norm": 3.450306332819502, + "learning_rate": 5.0373561463777e-07, + "loss": 0.5372, + "step": 17916 + }, + { + "epoch": 0.739333168275976, + "grad_norm": 3.0210837877543675, + "learning_rate": 5.035857524827555e-07, + "loss": 0.5033, + "step": 17917 + }, + { + "epoch": 0.7393744326153339, + "grad_norm": 4.257809748913013, + "learning_rate": 5.034359081261925e-07, + "loss": 0.4725, + "step": 17918 + }, + { + "epoch": 0.7394156969546918, + "grad_norm": 3.1392063067453964, + "learning_rate": 5.032860815707572e-07, + "loss": 0.5442, + "step": 17919 + }, + { + "epoch": 0.7394569612940497, + "grad_norm": 2.3935029771831036, + "learning_rate": 5.031362728191254e-07, + "loss": 0.5484, + "step": 17920 + }, + { + "epoch": 0.7394982256334076, + "grad_norm": 5.112425654936469, + "learning_rate": 5.029864818739737e-07, + "loss": 0.4754, + "step": 17921 + }, + { + "epoch": 0.7395394899727655, + "grad_norm": 19.64235072605581, + "learning_rate": 5.028367087379773e-07, + "loss": 0.5218, + "step": 17922 + }, + { + "epoch": 0.7395807543121234, + "grad_norm": 4.750441678223367, + "learning_rate": 5.026869534138125e-07, + "loss": 0.4358, + "step": 17923 + }, + { + "epoch": 0.7396220186514814, + "grad_norm": 4.027029799677453, + "learning_rate": 5.02537215904153e-07, + "loss": 0.5244, + "step": 17924 + }, + { + "epoch": 0.7396632829908393, + "grad_norm": 8.578075042611967, + "learning_rate": 5.023874962116743e-07, + "loss": 0.4858, + "step": 17925 + }, + { + "epoch": 0.7397045473301972, + "grad_norm": 3.5124744261103076, + "learning_rate": 5.022377943390502e-07, + "loss": 0.5139, + "step": 17926 + }, + { + "epoch": 0.7397458116695552, + "grad_norm": 3.4795596618061273, + "learning_rate": 5.02088110288956e-07, + "loss": 0.5223, + "step": 17927 + }, + { + "epoch": 0.7397870760089131, + "grad_norm": 2.364247361598706, + "learning_rate": 5.019384440640637e-07, + "loss": 0.5621, + "step": 17928 + }, + { + "epoch": 0.739828340348271, + "grad_norm": 2.9441235172365183, + "learning_rate": 5.017887956670478e-07, + "loss": 0.545, + "step": 17929 + }, + { + "epoch": 0.739869604687629, + "grad_norm": 2.3185027023200737, + "learning_rate": 5.01639165100581e-07, + "loss": 0.546, + "step": 17930 + }, + { + "epoch": 0.7399108690269869, + "grad_norm": 2.748435388346257, + "learning_rate": 5.014895523673369e-07, + "loss": 0.4951, + "step": 17931 + }, + { + "epoch": 0.7399521333663448, + "grad_norm": 2.4793472548389284, + "learning_rate": 5.013399574699872e-07, + "loss": 0.5119, + "step": 17932 + }, + { + "epoch": 0.7399933977057027, + "grad_norm": 2.903644174334943, + "learning_rate": 5.011903804112036e-07, + "loss": 0.4405, + "step": 17933 + }, + { + "epoch": 0.7400346620450606, + "grad_norm": 1.9929317112517722, + "learning_rate": 5.010408211936583e-07, + "loss": 0.4968, + "step": 17934 + }, + { + "epoch": 0.7400759263844185, + "grad_norm": 1.9092750016344708, 
+ "learning_rate": 5.008912798200231e-07, + "loss": 0.4648, + "step": 17935 + }, + { + "epoch": 0.7401171907237765, + "grad_norm": 2.479043295075367, + "learning_rate": 5.007417562929696e-07, + "loss": 0.4958, + "step": 17936 + }, + { + "epoch": 0.7401584550631345, + "grad_norm": 3.5398224150440654, + "learning_rate": 5.005922506151675e-07, + "loss": 0.5026, + "step": 17937 + }, + { + "epoch": 0.7401997194024924, + "grad_norm": 20.962219124729426, + "learning_rate": 5.00442762789288e-07, + "loss": 0.5272, + "step": 17938 + }, + { + "epoch": 0.7402409837418503, + "grad_norm": 2.238006690231729, + "learning_rate": 5.002932928180018e-07, + "loss": 0.5221, + "step": 17939 + }, + { + "epoch": 0.7402822480812082, + "grad_norm": 3.3130059281771054, + "learning_rate": 5.001438407039775e-07, + "loss": 0.537, + "step": 17940 + }, + { + "epoch": 0.7403235124205662, + "grad_norm": 5.692795152644496, + "learning_rate": 4.999944064498858e-07, + "loss": 0.5116, + "step": 17941 + }, + { + "epoch": 0.7403647767599241, + "grad_norm": 2.705856055358354, + "learning_rate": 4.998449900583953e-07, + "loss": 0.4891, + "step": 17942 + }, + { + "epoch": 0.740406041099282, + "grad_norm": 4.487469773733289, + "learning_rate": 4.996955915321761e-07, + "loss": 0.4764, + "step": 17943 + }, + { + "epoch": 0.7404473054386399, + "grad_norm": 3.2953807756345124, + "learning_rate": 4.995462108738959e-07, + "loss": 0.4925, + "step": 17944 + }, + { + "epoch": 0.7404885697779978, + "grad_norm": 2.420853705624711, + "learning_rate": 4.993968480862227e-07, + "loss": 0.5336, + "step": 17945 + }, + { + "epoch": 0.7405298341173557, + "grad_norm": 3.1385547348348295, + "learning_rate": 4.992475031718247e-07, + "loss": 0.494, + "step": 17946 + }, + { + "epoch": 0.7405710984567138, + "grad_norm": 2.7654283270457287, + "learning_rate": 4.9909817613337e-07, + "loss": 0.5776, + "step": 17947 + }, + { + "epoch": 0.7406123627960717, + "grad_norm": 3.504583863225446, + "learning_rate": 4.98948866973526e-07, + "loss": 0.5233, + "step": 17948 + }, + { + "epoch": 0.7406536271354296, + "grad_norm": 8.776685696494958, + "learning_rate": 4.987995756949592e-07, + "loss": 0.5668, + "step": 17949 + }, + { + "epoch": 0.7406948914747875, + "grad_norm": 4.189548889241498, + "learning_rate": 4.986503023003366e-07, + "loss": 0.509, + "step": 17950 + }, + { + "epoch": 0.7407361558141454, + "grad_norm": 2.508276212528422, + "learning_rate": 4.985010467923245e-07, + "loss": 0.5433, + "step": 17951 + }, + { + "epoch": 0.7407774201535033, + "grad_norm": 2.3083394604018035, + "learning_rate": 4.983518091735897e-07, + "loss": 0.4831, + "step": 17952 + }, + { + "epoch": 0.7408186844928613, + "grad_norm": 2.0912805497538582, + "learning_rate": 4.982025894467968e-07, + "loss": 0.5478, + "step": 17953 + }, + { + "epoch": 0.7408599488322192, + "grad_norm": 3.130702584197683, + "learning_rate": 4.980533876146116e-07, + "loss": 0.5284, + "step": 17954 + }, + { + "epoch": 0.7409012131715771, + "grad_norm": 3.203239288334409, + "learning_rate": 4.979042036796995e-07, + "loss": 0.5062, + "step": 17955 + }, + { + "epoch": 0.740942477510935, + "grad_norm": 6.213595825504419, + "learning_rate": 4.977550376447257e-07, + "loss": 0.4774, + "step": 17956 + }, + { + "epoch": 0.740983741850293, + "grad_norm": 2.281009738472418, + "learning_rate": 4.976058895123541e-07, + "loss": 0.5155, + "step": 17957 + }, + { + "epoch": 0.741025006189651, + "grad_norm": 10.149974318812385, + "learning_rate": 4.974567592852486e-07, + "loss": 0.5038, + "step": 17958 + }, + { + "epoch": 
0.7410662705290089, + "grad_norm": 3.8162788963510734, + "learning_rate": 4.97307646966073e-07, + "loss": 0.5395, + "step": 17959 + }, + { + "epoch": 0.7411075348683668, + "grad_norm": 5.447326797676109, + "learning_rate": 4.97158552557492e-07, + "loss": 0.5384, + "step": 17960 + }, + { + "epoch": 0.7411487992077247, + "grad_norm": 2.4746886902147405, + "learning_rate": 4.970094760621674e-07, + "loss": 0.5415, + "step": 17961 + }, + { + "epoch": 0.7411900635470826, + "grad_norm": 3.5245011307588587, + "learning_rate": 4.968604174827627e-07, + "loss": 0.5236, + "step": 17962 + }, + { + "epoch": 0.7412313278864405, + "grad_norm": 2.881657071718192, + "learning_rate": 4.967113768219403e-07, + "loss": 0.5563, + "step": 17963 + }, + { + "epoch": 0.7412725922257984, + "grad_norm": 3.7105168371350135, + "learning_rate": 4.96562354082363e-07, + "loss": 0.5317, + "step": 17964 + }, + { + "epoch": 0.7413138565651564, + "grad_norm": 3.0262341905983186, + "learning_rate": 4.964133492666919e-07, + "loss": 0.5585, + "step": 17965 + }, + { + "epoch": 0.7413551209045143, + "grad_norm": 12.332910727970418, + "learning_rate": 4.962643623775889e-07, + "loss": 0.5127, + "step": 17966 + }, + { + "epoch": 0.7413963852438722, + "grad_norm": 3.100901827927117, + "learning_rate": 4.961153934177153e-07, + "loss": 0.5782, + "step": 17967 + }, + { + "epoch": 0.7414376495832302, + "grad_norm": 4.436670208578519, + "learning_rate": 4.959664423897328e-07, + "loss": 0.5098, + "step": 17968 + }, + { + "epoch": 0.7414789139225881, + "grad_norm": 4.688996848192955, + "learning_rate": 4.958175092963007e-07, + "loss": 0.5665, + "step": 17969 + }, + { + "epoch": 0.741520178261946, + "grad_norm": 3.179474462696255, + "learning_rate": 4.956685941400805e-07, + "loss": 0.527, + "step": 17970 + }, + { + "epoch": 0.741561442601304, + "grad_norm": 2.759783442476308, + "learning_rate": 4.955196969237313e-07, + "loss": 0.484, + "step": 17971 + }, + { + "epoch": 0.7416027069406619, + "grad_norm": 2.4373251323675054, + "learning_rate": 4.953708176499128e-07, + "loss": 0.4734, + "step": 17972 + }, + { + "epoch": 0.7416439712800198, + "grad_norm": 3.482924111722481, + "learning_rate": 4.952219563212855e-07, + "loss": 0.5014, + "step": 17973 + }, + { + "epoch": 0.7416852356193777, + "grad_norm": 2.8036270948589337, + "learning_rate": 4.95073112940507e-07, + "loss": 0.4813, + "step": 17974 + }, + { + "epoch": 0.7417264999587356, + "grad_norm": 3.199817312341191, + "learning_rate": 4.949242875102368e-07, + "loss": 0.5001, + "step": 17975 + }, + { + "epoch": 0.7417677642980935, + "grad_norm": 3.1845629164016622, + "learning_rate": 4.947754800331329e-07, + "loss": 0.4836, + "step": 17976 + }, + { + "epoch": 0.7418090286374515, + "grad_norm": 2.851884468185736, + "learning_rate": 4.946266905118542e-07, + "loss": 0.5199, + "step": 17977 + }, + { + "epoch": 0.7418502929768095, + "grad_norm": 3.96743809554683, + "learning_rate": 4.944779189490575e-07, + "loss": 0.4689, + "step": 17978 + }, + { + "epoch": 0.7418915573161674, + "grad_norm": 8.656032401195066, + "learning_rate": 4.943291653474004e-07, + "loss": 0.5275, + "step": 17979 + }, + { + "epoch": 0.7419328216555253, + "grad_norm": 4.85916532943298, + "learning_rate": 4.941804297095409e-07, + "loss": 0.548, + "step": 17980 + }, + { + "epoch": 0.7419740859948832, + "grad_norm": 2.1428445211676883, + "learning_rate": 4.940317120381344e-07, + "loss": 0.4948, + "step": 17981 + }, + { + "epoch": 0.7420153503342412, + "grad_norm": 2.6820404636272155, + "learning_rate": 4.938830123358382e-07, + 
"loss": 0.5026, + "step": 17982 + }, + { + "epoch": 0.7420566146735991, + "grad_norm": 3.3093340051113214, + "learning_rate": 4.937343306053089e-07, + "loss": 0.5212, + "step": 17983 + }, + { + "epoch": 0.742097879012957, + "grad_norm": 3.401826834155792, + "learning_rate": 4.93585666849201e-07, + "loss": 0.5, + "step": 17984 + }, + { + "epoch": 0.7421391433523149, + "grad_norm": 14.508769504274383, + "learning_rate": 4.934370210701714e-07, + "loss": 0.4584, + "step": 17985 + }, + { + "epoch": 0.7421804076916728, + "grad_norm": 10.470240881429685, + "learning_rate": 4.932883932708742e-07, + "loss": 0.5051, + "step": 17986 + }, + { + "epoch": 0.7422216720310307, + "grad_norm": 2.4755311453671993, + "learning_rate": 4.931397834539644e-07, + "loss": 0.5075, + "step": 17987 + }, + { + "epoch": 0.7422629363703888, + "grad_norm": 7.201527087389668, + "learning_rate": 4.929911916220973e-07, + "loss": 0.4616, + "step": 17988 + }, + { + "epoch": 0.7423042007097467, + "grad_norm": 3.063922135584142, + "learning_rate": 4.928426177779269e-07, + "loss": 0.5171, + "step": 17989 + }, + { + "epoch": 0.7423454650491046, + "grad_norm": 3.66128686043837, + "learning_rate": 4.926940619241064e-07, + "loss": 0.483, + "step": 17990 + }, + { + "epoch": 0.7423867293884625, + "grad_norm": 4.693055166181839, + "learning_rate": 4.925455240632898e-07, + "loss": 0.547, + "step": 17991 + }, + { + "epoch": 0.7424279937278204, + "grad_norm": 2.2126649687648534, + "learning_rate": 4.923970041981305e-07, + "loss": 0.5042, + "step": 17992 + }, + { + "epoch": 0.7424692580671783, + "grad_norm": 2.844190163712833, + "learning_rate": 4.922485023312817e-07, + "loss": 0.5224, + "step": 17993 + }, + { + "epoch": 0.7425105224065363, + "grad_norm": 3.201851502527551, + "learning_rate": 4.921000184653953e-07, + "loss": 0.4878, + "step": 17994 + }, + { + "epoch": 0.7425517867458942, + "grad_norm": 5.513657402356566, + "learning_rate": 4.91951552603124e-07, + "loss": 0.4699, + "step": 17995 + }, + { + "epoch": 0.7425930510852521, + "grad_norm": 3.0737063061827135, + "learning_rate": 4.9180310474712e-07, + "loss": 0.5258, + "step": 17996 + }, + { + "epoch": 0.74263431542461, + "grad_norm": 5.029536459578831, + "learning_rate": 4.916546749000349e-07, + "loss": 0.5116, + "step": 17997 + }, + { + "epoch": 0.742675579763968, + "grad_norm": 2.443080313119709, + "learning_rate": 4.915062630645192e-07, + "loss": 0.5222, + "step": 17998 + }, + { + "epoch": 0.742716844103326, + "grad_norm": 4.796574452519389, + "learning_rate": 4.913578692432246e-07, + "loss": 0.5103, + "step": 17999 + }, + { + "epoch": 0.7427581084426839, + "grad_norm": 2.912766092772959, + "learning_rate": 4.912094934388015e-07, + "loss": 0.5009, + "step": 18000 + }, + { + "epoch": 0.7427993727820418, + "grad_norm": 3.8514822957831627, + "learning_rate": 4.910611356539011e-07, + "loss": 0.5312, + "step": 18001 + }, + { + "epoch": 0.7428406371213997, + "grad_norm": 4.254737687555279, + "learning_rate": 4.909127958911722e-07, + "loss": 0.5634, + "step": 18002 + }, + { + "epoch": 0.7428819014607576, + "grad_norm": 2.335215776477867, + "learning_rate": 4.907644741532652e-07, + "loss": 0.4744, + "step": 18003 + }, + { + "epoch": 0.7429231658001155, + "grad_norm": 2.702478738814418, + "learning_rate": 4.906161704428294e-07, + "loss": 0.5015, + "step": 18004 + }, + { + "epoch": 0.7429644301394734, + "grad_norm": 2.723926300371035, + "learning_rate": 4.904678847625143e-07, + "loss": 0.5043, + "step": 18005 + }, + { + "epoch": 0.7430056944788314, + "grad_norm": 3.408404902397237, + 
"learning_rate": 4.903196171149678e-07, + "loss": 0.4854, + "step": 18006 + }, + { + "epoch": 0.7430469588181893, + "grad_norm": 2.690505492719241, + "learning_rate": 4.901713675028388e-07, + "loss": 0.5872, + "step": 18007 + }, + { + "epoch": 0.7430882231575473, + "grad_norm": 10.51297431191341, + "learning_rate": 4.900231359287755e-07, + "loss": 0.5222, + "step": 18008 + }, + { + "epoch": 0.7431294874969052, + "grad_norm": 2.5410464696408575, + "learning_rate": 4.898749223954257e-07, + "loss": 0.5052, + "step": 18009 + }, + { + "epoch": 0.7431707518362631, + "grad_norm": 9.002785838070885, + "learning_rate": 4.89726726905437e-07, + "loss": 0.5122, + "step": 18010 + }, + { + "epoch": 0.743212016175621, + "grad_norm": 3.060068060134522, + "learning_rate": 4.895785494614558e-07, + "loss": 0.4868, + "step": 18011 + }, + { + "epoch": 0.743253280514979, + "grad_norm": 2.7924757347962545, + "learning_rate": 4.894303900661293e-07, + "loss": 0.5119, + "step": 18012 + }, + { + "epoch": 0.7432945448543369, + "grad_norm": 6.248325539130894, + "learning_rate": 4.892822487221042e-07, + "loss": 0.5111, + "step": 18013 + }, + { + "epoch": 0.7433358091936948, + "grad_norm": 2.6910548281686903, + "learning_rate": 4.89134125432027e-07, + "loss": 0.4605, + "step": 18014 + }, + { + "epoch": 0.7433770735330527, + "grad_norm": 2.991699179717265, + "learning_rate": 4.889860201985425e-07, + "loss": 0.4603, + "step": 18015 + }, + { + "epoch": 0.7434183378724106, + "grad_norm": 2.39491523563365, + "learning_rate": 4.88837933024297e-07, + "loss": 0.5026, + "step": 18016 + }, + { + "epoch": 0.7434596022117685, + "grad_norm": 3.28266597023872, + "learning_rate": 4.886898639119359e-07, + "loss": 0.5099, + "step": 18017 + }, + { + "epoch": 0.7435008665511266, + "grad_norm": 3.791704566546315, + "learning_rate": 4.885418128641034e-07, + "loss": 0.5497, + "step": 18018 + }, + { + "epoch": 0.7435421308904845, + "grad_norm": 2.4780970338337345, + "learning_rate": 4.883937798834444e-07, + "loss": 0.5224, + "step": 18019 + }, + { + "epoch": 0.7435833952298424, + "grad_norm": 2.7145220265872427, + "learning_rate": 4.882457649726031e-07, + "loss": 0.5273, + "step": 18020 + }, + { + "epoch": 0.7436246595692003, + "grad_norm": 4.79131770920195, + "learning_rate": 4.88097768134224e-07, + "loss": 0.5052, + "step": 18021 + }, + { + "epoch": 0.7436659239085582, + "grad_norm": 2.8297171862076675, + "learning_rate": 4.879497893709495e-07, + "loss": 0.524, + "step": 18022 + }, + { + "epoch": 0.7437071882479162, + "grad_norm": 3.482099139151036, + "learning_rate": 4.878018286854241e-07, + "loss": 0.4886, + "step": 18023 + }, + { + "epoch": 0.7437484525872741, + "grad_norm": 2.5373248298087545, + "learning_rate": 4.876538860802898e-07, + "loss": 0.466, + "step": 18024 + }, + { + "epoch": 0.743789716926632, + "grad_norm": 4.611897104740132, + "learning_rate": 4.875059615581895e-07, + "loss": 0.5424, + "step": 18025 + }, + { + "epoch": 0.7438309812659899, + "grad_norm": 3.5222625301003565, + "learning_rate": 4.87358055121766e-07, + "loss": 0.5682, + "step": 18026 + }, + { + "epoch": 0.7438722456053478, + "grad_norm": 2.9989648732444456, + "learning_rate": 4.872101667736606e-07, + "loss": 0.5494, + "step": 18027 + }, + { + "epoch": 0.7439135099447057, + "grad_norm": 3.7060801087360224, + "learning_rate": 4.870622965165151e-07, + "loss": 0.5243, + "step": 18028 + }, + { + "epoch": 0.7439547742840638, + "grad_norm": 3.242516839702971, + "learning_rate": 4.869144443529708e-07, + "loss": 0.5517, + "step": 18029 + }, + { + "epoch": 
0.7439960386234217, + "grad_norm": 2.3326908823103074, + "learning_rate": 4.867666102856697e-07, + "loss": 0.4931, + "step": 18030 + }, + { + "epoch": 0.7440373029627796, + "grad_norm": 3.8708318592310436, + "learning_rate": 4.86618794317251e-07, + "loss": 0.4944, + "step": 18031 + }, + { + "epoch": 0.7440785673021375, + "grad_norm": 2.778201178564837, + "learning_rate": 4.864709964503557e-07, + "loss": 0.481, + "step": 18032 + }, + { + "epoch": 0.7441198316414954, + "grad_norm": 3.062742293347211, + "learning_rate": 4.863232166876239e-07, + "loss": 0.5442, + "step": 18033 + }, + { + "epoch": 0.7441610959808533, + "grad_norm": 3.7144961778824874, + "learning_rate": 4.861754550316958e-07, + "loss": 0.5435, + "step": 18034 + }, + { + "epoch": 0.7442023603202113, + "grad_norm": 2.6497277406164437, + "learning_rate": 4.860277114852098e-07, + "loss": 0.4818, + "step": 18035 + }, + { + "epoch": 0.7442436246595692, + "grad_norm": 3.032426957954275, + "learning_rate": 4.858799860508059e-07, + "loss": 0.4914, + "step": 18036 + }, + { + "epoch": 0.7442848889989271, + "grad_norm": 3.0734688303612194, + "learning_rate": 4.857322787311218e-07, + "loss": 0.4996, + "step": 18037 + }, + { + "epoch": 0.744326153338285, + "grad_norm": 4.393843160396958, + "learning_rate": 4.855845895287972e-07, + "loss": 0.5317, + "step": 18038 + }, + { + "epoch": 0.744367417677643, + "grad_norm": 3.2769012929491046, + "learning_rate": 4.85436918446469e-07, + "loss": 0.5354, + "step": 18039 + }, + { + "epoch": 0.7444086820170009, + "grad_norm": 3.9692300949790766, + "learning_rate": 4.852892654867752e-07, + "loss": 0.5622, + "step": 18040 + }, + { + "epoch": 0.7444499463563589, + "grad_norm": 5.042836590913952, + "learning_rate": 4.851416306523538e-07, + "loss": 0.4844, + "step": 18041 + }, + { + "epoch": 0.7444912106957168, + "grad_norm": 2.709364756839081, + "learning_rate": 4.84994013945842e-07, + "loss": 0.5091, + "step": 18042 + }, + { + "epoch": 0.7445324750350747, + "grad_norm": 2.121313114643155, + "learning_rate": 4.84846415369876e-07, + "loss": 0.4737, + "step": 18043 + }, + { + "epoch": 0.7445737393744326, + "grad_norm": 2.811027257839096, + "learning_rate": 4.846988349270925e-07, + "loss": 0.4906, + "step": 18044 + }, + { + "epoch": 0.7446150037137905, + "grad_norm": 2.020926005806757, + "learning_rate": 4.845512726201276e-07, + "loss": 0.5036, + "step": 18045 + }, + { + "epoch": 0.7446562680531484, + "grad_norm": 8.221448221637703, + "learning_rate": 4.844037284516176e-07, + "loss": 0.4584, + "step": 18046 + }, + { + "epoch": 0.7446975323925064, + "grad_norm": 2.4839461488152845, + "learning_rate": 4.842562024241974e-07, + "loss": 0.5286, + "step": 18047 + }, + { + "epoch": 0.7447387967318643, + "grad_norm": 4.246040629773036, + "learning_rate": 4.841086945405022e-07, + "loss": 0.5098, + "step": 18048 + }, + { + "epoch": 0.7447800610712223, + "grad_norm": 3.4557023394486137, + "learning_rate": 4.839612048031677e-07, + "loss": 0.5421, + "step": 18049 + }, + { + "epoch": 0.7448213254105802, + "grad_norm": 4.237575064784018, + "learning_rate": 4.838137332148273e-07, + "loss": 0.4868, + "step": 18050 + }, + { + "epoch": 0.7448625897499381, + "grad_norm": 6.983483492420686, + "learning_rate": 4.836662797781163e-07, + "loss": 0.5497, + "step": 18051 + }, + { + "epoch": 0.744903854089296, + "grad_norm": 2.814045970670331, + "learning_rate": 4.835188444956674e-07, + "loss": 0.5117, + "step": 18052 + }, + { + "epoch": 0.744945118428654, + "grad_norm": 2.7632264921157264, + "learning_rate": 4.83371427370115e-07, + 
"loss": 0.5437, + "step": 18053 + }, + { + "epoch": 0.7449863827680119, + "grad_norm": 6.216573487526029, + "learning_rate": 4.832240284040919e-07, + "loss": 0.5284, + "step": 18054 + }, + { + "epoch": 0.7450276471073698, + "grad_norm": 3.925817573998081, + "learning_rate": 4.830766476002319e-07, + "loss": 0.4664, + "step": 18055 + }, + { + "epoch": 0.7450689114467277, + "grad_norm": 1.8585884054574433, + "learning_rate": 4.829292849611663e-07, + "loss": 0.521, + "step": 18056 + }, + { + "epoch": 0.7451101757860856, + "grad_norm": 3.2364475910078307, + "learning_rate": 4.827819404895281e-07, + "loss": 0.5814, + "step": 18057 + }, + { + "epoch": 0.7451514401254435, + "grad_norm": 8.402840619745064, + "learning_rate": 4.826346141879494e-07, + "loss": 0.4616, + "step": 18058 + }, + { + "epoch": 0.7451927044648016, + "grad_norm": 3.872114565263081, + "learning_rate": 4.824873060590612e-07, + "loss": 0.4937, + "step": 18059 + }, + { + "epoch": 0.7452339688041595, + "grad_norm": 5.249412757988957, + "learning_rate": 4.823400161054952e-07, + "loss": 0.527, + "step": 18060 + }, + { + "epoch": 0.7452752331435174, + "grad_norm": 3.7384529620423854, + "learning_rate": 4.821927443298823e-07, + "loss": 0.454, + "step": 18061 + }, + { + "epoch": 0.7453164974828753, + "grad_norm": 4.100504985359571, + "learning_rate": 4.820454907348535e-07, + "loss": 0.5042, + "step": 18062 + }, + { + "epoch": 0.7453577618222332, + "grad_norm": 16.868283613255628, + "learning_rate": 4.81898255323039e-07, + "loss": 0.5221, + "step": 18063 + }, + { + "epoch": 0.7453990261615911, + "grad_norm": 3.9034917626451775, + "learning_rate": 4.817510380970679e-07, + "loss": 0.5482, + "step": 18064 + }, + { + "epoch": 0.7454402905009491, + "grad_norm": 5.8472020295085265, + "learning_rate": 4.816038390595707e-07, + "loss": 0.4855, + "step": 18065 + }, + { + "epoch": 0.745481554840307, + "grad_norm": 2.325060782868276, + "learning_rate": 4.814566582131765e-07, + "loss": 0.5217, + "step": 18066 + }, + { + "epoch": 0.7455228191796649, + "grad_norm": 9.258558124571527, + "learning_rate": 4.813094955605147e-07, + "loss": 0.4872, + "step": 18067 + }, + { + "epoch": 0.7455640835190228, + "grad_norm": 5.122886479342013, + "learning_rate": 4.811623511042134e-07, + "loss": 0.5353, + "step": 18068 + }, + { + "epoch": 0.7456053478583808, + "grad_norm": 2.2230558123552755, + "learning_rate": 4.81015224846901e-07, + "loss": 0.5664, + "step": 18069 + }, + { + "epoch": 0.7456466121977388, + "grad_norm": 3.7942713627362963, + "learning_rate": 4.80868116791206e-07, + "loss": 0.5345, + "step": 18070 + }, + { + "epoch": 0.7456878765370967, + "grad_norm": 2.1747767213042524, + "learning_rate": 4.807210269397564e-07, + "loss": 0.5344, + "step": 18071 + }, + { + "epoch": 0.7457291408764546, + "grad_norm": 2.2738942953734846, + "learning_rate": 4.805739552951786e-07, + "loss": 0.5305, + "step": 18072 + }, + { + "epoch": 0.7457704052158125, + "grad_norm": 9.975037635734576, + "learning_rate": 4.804269018601002e-07, + "loss": 0.5559, + "step": 18073 + }, + { + "epoch": 0.7458116695551704, + "grad_norm": 2.5332492285581845, + "learning_rate": 4.802798666371478e-07, + "loss": 0.5441, + "step": 18074 + }, + { + "epoch": 0.7458529338945283, + "grad_norm": 3.066018498220826, + "learning_rate": 4.80132849628949e-07, + "loss": 0.5064, + "step": 18075 + }, + { + "epoch": 0.7458941982338863, + "grad_norm": 3.4758103943141143, + "learning_rate": 4.799858508381278e-07, + "loss": 0.4637, + "step": 18076 + }, + { + "epoch": 0.7459354625732442, + "grad_norm": 
5.933066282436221, + "learning_rate": 4.79838870267311e-07, + "loss": 0.5049, + "step": 18077 + }, + { + "epoch": 0.7459767269126021, + "grad_norm": 2.248095401725941, + "learning_rate": 4.796919079191241e-07, + "loss": 0.5504, + "step": 18078 + }, + { + "epoch": 0.7460179912519601, + "grad_norm": 2.5046242928602873, + "learning_rate": 4.795449637961925e-07, + "loss": 0.5033, + "step": 18079 + }, + { + "epoch": 0.746059255591318, + "grad_norm": 3.315956346953773, + "learning_rate": 4.793980379011402e-07, + "loss": 0.533, + "step": 18080 + }, + { + "epoch": 0.7461005199306759, + "grad_norm": 7.6273463577538445, + "learning_rate": 4.792511302365922e-07, + "loss": 0.5451, + "step": 18081 + }, + { + "epoch": 0.7461417842700339, + "grad_norm": 2.5960953912174674, + "learning_rate": 4.791042408051724e-07, + "loss": 0.4426, + "step": 18082 + }, + { + "epoch": 0.7461830486093918, + "grad_norm": 3.32717851020165, + "learning_rate": 4.789573696095054e-07, + "loss": 0.4614, + "step": 18083 + }, + { + "epoch": 0.7462243129487497, + "grad_norm": 4.153919936483031, + "learning_rate": 4.788105166522134e-07, + "loss": 0.4596, + "step": 18084 + }, + { + "epoch": 0.7462655772881076, + "grad_norm": 8.07797472789753, + "learning_rate": 4.786636819359201e-07, + "loss": 0.5043, + "step": 18085 + }, + { + "epoch": 0.7463068416274655, + "grad_norm": 2.3529815247350396, + "learning_rate": 4.785168654632486e-07, + "loss": 0.5053, + "step": 18086 + }, + { + "epoch": 0.7463481059668234, + "grad_norm": 3.6077057728467237, + "learning_rate": 4.783700672368217e-07, + "loss": 0.5579, + "step": 18087 + }, + { + "epoch": 0.7463893703061814, + "grad_norm": 4.29664953307838, + "learning_rate": 4.782232872592612e-07, + "loss": 0.4665, + "step": 18088 + }, + { + "epoch": 0.7464306346455393, + "grad_norm": 4.354333474416511, + "learning_rate": 4.780765255331883e-07, + "loss": 0.5385, + "step": 18089 + }, + { + "epoch": 0.7464718989848973, + "grad_norm": 3.83380623803932, + "learning_rate": 4.779297820612252e-07, + "loss": 0.5501, + "step": 18090 + }, + { + "epoch": 0.7465131633242552, + "grad_norm": 3.158126310938509, + "learning_rate": 4.777830568459928e-07, + "loss": 0.4887, + "step": 18091 + }, + { + "epoch": 0.7465544276636131, + "grad_norm": 3.0864072452409617, + "learning_rate": 4.77636349890113e-07, + "loss": 0.5337, + "step": 18092 + }, + { + "epoch": 0.746595692002971, + "grad_norm": 4.544062098336353, + "learning_rate": 4.774896611962048e-07, + "loss": 0.4943, + "step": 18093 + }, + { + "epoch": 0.746636956342329, + "grad_norm": 2.3843036950010776, + "learning_rate": 4.773429907668893e-07, + "loss": 0.5114, + "step": 18094 + }, + { + "epoch": 0.7466782206816869, + "grad_norm": 3.7033587490931463, + "learning_rate": 4.771963386047867e-07, + "loss": 0.483, + "step": 18095 + }, + { + "epoch": 0.7467194850210448, + "grad_norm": 4.780646888864087, + "learning_rate": 4.770497047125158e-07, + "loss": 0.5258, + "step": 18096 + }, + { + "epoch": 0.7467607493604027, + "grad_norm": 2.979079729610741, + "learning_rate": 4.769030890926961e-07, + "loss": 0.4981, + "step": 18097 + }, + { + "epoch": 0.7468020136997606, + "grad_norm": 2.431063857568957, + "learning_rate": 4.767564917479467e-07, + "loss": 0.5599, + "step": 18098 + }, + { + "epoch": 0.7468432780391185, + "grad_norm": 6.295051431200801, + "learning_rate": 4.7660991268088646e-07, + "loss": 0.5429, + "step": 18099 + }, + { + "epoch": 0.7468845423784766, + "grad_norm": 4.011770481426779, + "learning_rate": 4.76463351894133e-07, + "loss": 0.4773, + "step": 18100 + }, + { + 
"epoch": 0.7469258067178345, + "grad_norm": 3.074971597279404, + "learning_rate": 4.763168093903045e-07, + "loss": 0.4536, + "step": 18101 + }, + { + "epoch": 0.7469670710571924, + "grad_norm": 2.6112860429897475, + "learning_rate": 4.7617028517201914e-07, + "loss": 0.4982, + "step": 18102 + }, + { + "epoch": 0.7470083353965503, + "grad_norm": 2.8197870420647937, + "learning_rate": 4.760237792418931e-07, + "loss": 0.4528, + "step": 18103 + }, + { + "epoch": 0.7470495997359082, + "grad_norm": 10.244468054432684, + "learning_rate": 4.7587729160254463e-07, + "loss": 0.4633, + "step": 18104 + }, + { + "epoch": 0.7470908640752661, + "grad_norm": 16.01599367617267, + "learning_rate": 4.757308222565891e-07, + "loss": 0.5006, + "step": 18105 + }, + { + "epoch": 0.7471321284146241, + "grad_norm": 3.628201825028489, + "learning_rate": 4.7558437120664343e-07, + "loss": 0.5376, + "step": 18106 + }, + { + "epoch": 0.747173392753982, + "grad_norm": 7.38470631279404, + "learning_rate": 4.7543793845532374e-07, + "loss": 0.5888, + "step": 18107 + }, + { + "epoch": 0.7472146570933399, + "grad_norm": 2.982565664699439, + "learning_rate": 4.7529152400524587e-07, + "loss": 0.4924, + "step": 18108 + }, + { + "epoch": 0.7472559214326978, + "grad_norm": 4.785487008004748, + "learning_rate": 4.751451278590243e-07, + "loss": 0.5741, + "step": 18109 + }, + { + "epoch": 0.7472971857720558, + "grad_norm": 2.669535954391931, + "learning_rate": 4.749987500192746e-07, + "loss": 0.5115, + "step": 18110 + }, + { + "epoch": 0.7473384501114138, + "grad_norm": 4.601411845491161, + "learning_rate": 4.748523904886114e-07, + "loss": 0.5287, + "step": 18111 + }, + { + "epoch": 0.7473797144507717, + "grad_norm": 8.66945975243083, + "learning_rate": 4.747060492696494e-07, + "loss": 0.522, + "step": 18112 + }, + { + "epoch": 0.7474209787901296, + "grad_norm": 6.640867754701149, + "learning_rate": 4.7455972636500195e-07, + "loss": 0.5261, + "step": 18113 + }, + { + "epoch": 0.7474622431294875, + "grad_norm": 4.142831255088292, + "learning_rate": 4.744134217772828e-07, + "loss": 0.4941, + "step": 18114 + }, + { + "epoch": 0.7475035074688454, + "grad_norm": 11.428364539390273, + "learning_rate": 4.742671355091062e-07, + "loss": 0.5098, + "step": 18115 + }, + { + "epoch": 0.7475447718082033, + "grad_norm": 2.177669599651265, + "learning_rate": 4.741208675630846e-07, + "loss": 0.5315, + "step": 18116 + }, + { + "epoch": 0.7475860361475613, + "grad_norm": 1.5235797350115874, + "learning_rate": 4.7397461794183014e-07, + "loss": 0.5048, + "step": 18117 + }, + { + "epoch": 0.7476273004869192, + "grad_norm": 2.1813245025771653, + "learning_rate": 4.738283866479557e-07, + "loss": 0.461, + "step": 18118 + }, + { + "epoch": 0.7476685648262771, + "grad_norm": 4.328389961490563, + "learning_rate": 4.7368217368407343e-07, + "loss": 0.5106, + "step": 18119 + }, + { + "epoch": 0.7477098291656351, + "grad_norm": 3.544280120136532, + "learning_rate": 4.7353597905279537e-07, + "loss": 0.5535, + "step": 18120 + }, + { + "epoch": 0.747751093504993, + "grad_norm": 3.81560605373697, + "learning_rate": 4.7338980275673204e-07, + "loss": 0.5148, + "step": 18121 + }, + { + "epoch": 0.7477923578443509, + "grad_norm": 2.616873059446829, + "learning_rate": 4.7324364479849516e-07, + "loss": 0.536, + "step": 18122 + }, + { + "epoch": 0.7478336221837089, + "grad_norm": 4.352539591425352, + "learning_rate": 4.7309750518069523e-07, + "loss": 0.4594, + "step": 18123 + }, + { + "epoch": 0.7478748865230668, + "grad_norm": 2.35528124975292, + "learning_rate": 
4.7295138390594324e-07, + "loss": 0.5036, + "step": 18124 + }, + { + "epoch": 0.7479161508624247, + "grad_norm": 2.4910575255991865, + "learning_rate": 4.728052809768484e-07, + "loss": 0.5037, + "step": 18125 + }, + { + "epoch": 0.7479574152017826, + "grad_norm": 2.7108314690476676, + "learning_rate": 4.7265919639602093e-07, + "loss": 0.5128, + "step": 18126 + }, + { + "epoch": 0.7479986795411405, + "grad_norm": 5.665137253051264, + "learning_rate": 4.725131301660702e-07, + "loss": 0.5626, + "step": 18127 + }, + { + "epoch": 0.7480399438804984, + "grad_norm": 3.7673626882588778, + "learning_rate": 4.7236708228960564e-07, + "loss": 0.5599, + "step": 18128 + }, + { + "epoch": 0.7480812082198564, + "grad_norm": 4.245424653265552, + "learning_rate": 4.7222105276923586e-07, + "loss": 0.5253, + "step": 18129 + }, + { + "epoch": 0.7481224725592144, + "grad_norm": 10.937926843161469, + "learning_rate": 4.720750416075689e-07, + "loss": 0.5006, + "step": 18130 + }, + { + "epoch": 0.7481637368985723, + "grad_norm": 3.917772252426998, + "learning_rate": 4.71929048807213e-07, + "loss": 0.565, + "step": 18131 + }, + { + "epoch": 0.7482050012379302, + "grad_norm": 2.6052047566633934, + "learning_rate": 4.71783074370776e-07, + "loss": 0.4527, + "step": 18132 + }, + { + "epoch": 0.7482462655772881, + "grad_norm": 2.6214650483460904, + "learning_rate": 4.7163711830086614e-07, + "loss": 0.4968, + "step": 18133 + }, + { + "epoch": 0.748287529916646, + "grad_norm": 5.407703852901728, + "learning_rate": 4.714911806000893e-07, + "loss": 0.5369, + "step": 18134 + }, + { + "epoch": 0.748328794256004, + "grad_norm": 7.227494035714676, + "learning_rate": 4.7134526127105315e-07, + "loss": 0.5516, + "step": 18135 + }, + { + "epoch": 0.7483700585953619, + "grad_norm": 3.261704416626672, + "learning_rate": 4.711993603163642e-07, + "loss": 0.5146, + "step": 18136 + }, + { + "epoch": 0.7484113229347198, + "grad_norm": 2.574717113443778, + "learning_rate": 4.71053477738628e-07, + "loss": 0.5153, + "step": 18137 + }, + { + "epoch": 0.7484525872740777, + "grad_norm": 2.3387695890902633, + "learning_rate": 4.709076135404507e-07, + "loss": 0.5373, + "step": 18138 + }, + { + "epoch": 0.7484938516134356, + "grad_norm": 3.8439047678673206, + "learning_rate": 4.707617677244377e-07, + "loss": 0.475, + "step": 18139 + }, + { + "epoch": 0.7485351159527937, + "grad_norm": 6.258576871126654, + "learning_rate": 4.706159402931949e-07, + "loss": 0.4956, + "step": 18140 + }, + { + "epoch": 0.7485763802921516, + "grad_norm": 2.410312128427384, + "learning_rate": 4.704701312493264e-07, + "loss": 0.5607, + "step": 18141 + }, + { + "epoch": 0.7486176446315095, + "grad_norm": 4.040396165709518, + "learning_rate": 4.703243405954364e-07, + "loss": 0.4783, + "step": 18142 + }, + { + "epoch": 0.7486589089708674, + "grad_norm": 5.487186032212395, + "learning_rate": 4.7017856833412955e-07, + "loss": 0.521, + "step": 18143 + }, + { + "epoch": 0.7487001733102253, + "grad_norm": 3.1405073930207705, + "learning_rate": 4.700328144680097e-07, + "loss": 0.5248, + "step": 18144 + }, + { + "epoch": 0.7487414376495832, + "grad_norm": 2.7254229169609303, + "learning_rate": 4.6988707899968085e-07, + "loss": 0.5386, + "step": 18145 + }, + { + "epoch": 0.7487827019889411, + "grad_norm": 2.4119143278254564, + "learning_rate": 4.697413619317452e-07, + "loss": 0.4611, + "step": 18146 + }, + { + "epoch": 0.7488239663282991, + "grad_norm": 4.315821481187565, + "learning_rate": 4.695956632668062e-07, + "loss": 0.4554, + "step": 18147 + }, + { + "epoch": 
0.748865230667657, + "grad_norm": 2.3756219476459357, + "learning_rate": 4.694499830074664e-07, + "loss": 0.4424, + "step": 18148 + }, + { + "epoch": 0.7489064950070149, + "grad_norm": 5.523957605358054, + "learning_rate": 4.693043211563282e-07, + "loss": 0.5402, + "step": 18149 + }, + { + "epoch": 0.7489477593463728, + "grad_norm": 3.1915217637822746, + "learning_rate": 4.691586777159929e-07, + "loss": 0.5223, + "step": 18150 + }, + { + "epoch": 0.7489890236857308, + "grad_norm": 2.0214220215427323, + "learning_rate": 4.690130526890624e-07, + "loss": 0.5072, + "step": 18151 + }, + { + "epoch": 0.7490302880250888, + "grad_norm": 2.995451060189419, + "learning_rate": 4.68867446078138e-07, + "loss": 0.476, + "step": 18152 + }, + { + "epoch": 0.7490715523644467, + "grad_norm": 16.548203528470214, + "learning_rate": 4.6872185788582085e-07, + "loss": 0.54, + "step": 18153 + }, + { + "epoch": 0.7491128167038046, + "grad_norm": 2.762213007120162, + "learning_rate": 4.685762881147111e-07, + "loss": 0.542, + "step": 18154 + }, + { + "epoch": 0.7491540810431625, + "grad_norm": 2.6708089285359313, + "learning_rate": 4.6843073676740866e-07, + "loss": 0.5552, + "step": 18155 + }, + { + "epoch": 0.7491953453825204, + "grad_norm": 2.4155715125611668, + "learning_rate": 4.6828520384651396e-07, + "loss": 0.5128, + "step": 18156 + }, + { + "epoch": 0.7492366097218783, + "grad_norm": 3.168014497726331, + "learning_rate": 4.681396893546269e-07, + "loss": 0.4777, + "step": 18157 + }, + { + "epoch": 0.7492778740612362, + "grad_norm": 3.181720603082819, + "learning_rate": 4.6799419329434576e-07, + "loss": 0.5396, + "step": 18158 + }, + { + "epoch": 0.7493191384005942, + "grad_norm": 2.608930608439527, + "learning_rate": 4.6784871566827013e-07, + "loss": 0.4526, + "step": 18159 + }, + { + "epoch": 0.7493604027399521, + "grad_norm": 3.4369003633603215, + "learning_rate": 4.677032564789984e-07, + "loss": 0.5023, + "step": 18160 + }, + { + "epoch": 0.7494016670793101, + "grad_norm": 3.096400784778593, + "learning_rate": 4.6755781572912935e-07, + "loss": 0.5412, + "step": 18161 + }, + { + "epoch": 0.749442931418668, + "grad_norm": 6.545857048057651, + "learning_rate": 4.6741239342126006e-07, + "loss": 0.5135, + "step": 18162 + }, + { + "epoch": 0.7494841957580259, + "grad_norm": 2.631875953830925, + "learning_rate": 4.6726698955798863e-07, + "loss": 0.5829, + "step": 18163 + }, + { + "epoch": 0.7495254600973839, + "grad_norm": 17.24926281856953, + "learning_rate": 4.671216041419121e-07, + "loss": 0.5232, + "step": 18164 + }, + { + "epoch": 0.7495667244367418, + "grad_norm": 3.17624449483826, + "learning_rate": 4.6697623717562834e-07, + "loss": 0.5123, + "step": 18165 + }, + { + "epoch": 0.7496079887760997, + "grad_norm": 4.216521291342263, + "learning_rate": 4.6683088866173247e-07, + "loss": 0.5324, + "step": 18166 + }, + { + "epoch": 0.7496492531154576, + "grad_norm": 2.582096924920411, + "learning_rate": 4.6668555860282216e-07, + "loss": 0.4555, + "step": 18167 + }, + { + "epoch": 0.7496905174548155, + "grad_norm": 2.520423118283544, + "learning_rate": 4.665402470014921e-07, + "loss": 0.457, + "step": 18168 + }, + { + "epoch": 0.7497317817941734, + "grad_norm": 12.46955643015523, + "learning_rate": 4.663949538603386e-07, + "loss": 0.4913, + "step": 18169 + }, + { + "epoch": 0.7497730461335314, + "grad_norm": 3.5465918580747857, + "learning_rate": 4.662496791819575e-07, + "loss": 0.4468, + "step": 18170 + }, + { + "epoch": 0.7498143104728894, + "grad_norm": 2.866300419061295, + "learning_rate": 
4.6610442296894256e-07, + "loss": 0.528, + "step": 18171 + }, + { + "epoch": 0.7498555748122473, + "grad_norm": 3.6975478605752703, + "learning_rate": 4.6595918522388916e-07, + "loss": 0.4805, + "step": 18172 + }, + { + "epoch": 0.7498968391516052, + "grad_norm": 4.246504706190013, + "learning_rate": 4.6581396594939195e-07, + "loss": 0.4739, + "step": 18173 + }, + { + "epoch": 0.7499381034909631, + "grad_norm": 11.431169110968511, + "learning_rate": 4.656687651480438e-07, + "loss": 0.5345, + "step": 18174 + }, + { + "epoch": 0.749979367830321, + "grad_norm": 2.325008588090983, + "learning_rate": 4.655235828224392e-07, + "loss": 0.5116, + "step": 18175 + }, + { + "epoch": 0.750020632169679, + "grad_norm": 2.44349077908858, + "learning_rate": 4.6537841897517117e-07, + "loss": 0.5181, + "step": 18176 + }, + { + "epoch": 0.7500618965090369, + "grad_norm": 3.8028220846978873, + "learning_rate": 4.6523327360883326e-07, + "loss": 0.5157, + "step": 18177 + }, + { + "epoch": 0.7501031608483948, + "grad_norm": 2.6050278716669824, + "learning_rate": 4.650881467260174e-07, + "loss": 0.529, + "step": 18178 + }, + { + "epoch": 0.7501444251877527, + "grad_norm": 2.436261512832964, + "learning_rate": 4.64943038329316e-07, + "loss": 0.5075, + "step": 18179 + }, + { + "epoch": 0.7501856895271106, + "grad_norm": 3.0866168120768585, + "learning_rate": 4.6479794842132134e-07, + "loss": 0.5271, + "step": 18180 + }, + { + "epoch": 0.7502269538664686, + "grad_norm": 5.5309935593671264, + "learning_rate": 4.6465287700462536e-07, + "loss": 0.4865, + "step": 18181 + }, + { + "epoch": 0.7502682182058266, + "grad_norm": 10.130130990920842, + "learning_rate": 4.6450782408181906e-07, + "loss": 0.48, + "step": 18182 + }, + { + "epoch": 0.7503094825451845, + "grad_norm": 3.034991466759579, + "learning_rate": 4.64362789655493e-07, + "loss": 0.4769, + "step": 18183 + }, + { + "epoch": 0.7503507468845424, + "grad_norm": 5.126419597070341, + "learning_rate": 4.6421777372823836e-07, + "loss": 0.5393, + "step": 18184 + }, + { + "epoch": 0.7503920112239003, + "grad_norm": 3.8113138207987687, + "learning_rate": 4.6407277630264525e-07, + "loss": 0.4902, + "step": 18185 + }, + { + "epoch": 0.7504332755632582, + "grad_norm": 2.5207076406069975, + "learning_rate": 4.639277973813045e-07, + "loss": 0.4779, + "step": 18186 + }, + { + "epoch": 0.7504745399026161, + "grad_norm": 5.683031257526957, + "learning_rate": 4.637828369668046e-07, + "loss": 0.5606, + "step": 18187 + }, + { + "epoch": 0.7505158042419741, + "grad_norm": 2.6765159279343163, + "learning_rate": 4.6363789506173534e-07, + "loss": 0.4856, + "step": 18188 + }, + { + "epoch": 0.750557068581332, + "grad_norm": 4.304270478121354, + "learning_rate": 4.63492971668686e-07, + "loss": 0.5133, + "step": 18189 + }, + { + "epoch": 0.7505983329206899, + "grad_norm": 4.9509494809147885, + "learning_rate": 4.633480667902455e-07, + "loss": 0.5323, + "step": 18190 + }, + { + "epoch": 0.7506395972600479, + "grad_norm": 3.102194669305367, + "learning_rate": 4.6320318042900145e-07, + "loss": 0.487, + "step": 18191 + }, + { + "epoch": 0.7506808615994058, + "grad_norm": 3.7950336301068437, + "learning_rate": 4.630583125875421e-07, + "loss": 0.5087, + "step": 18192 + }, + { + "epoch": 0.7507221259387638, + "grad_norm": 3.559488126079959, + "learning_rate": 4.629134632684559e-07, + "loss": 0.5061, + "step": 18193 + }, + { + "epoch": 0.7507633902781217, + "grad_norm": 3.826585386800825, + "learning_rate": 4.627686324743298e-07, + "loss": 0.4819, + "step": 18194 + }, + { + "epoch": 
0.7508046546174796, + "grad_norm": 2.996083146035914, + "learning_rate": 4.6262382020774995e-07, + "loss": 0.521, + "step": 18195 + }, + { + "epoch": 0.7508459189568375, + "grad_norm": 3.5298242217853573, + "learning_rate": 4.624790264713039e-07, + "loss": 0.5484, + "step": 18196 + }, + { + "epoch": 0.7508871832961954, + "grad_norm": 4.092843740039925, + "learning_rate": 4.6233425126757787e-07, + "loss": 0.5389, + "step": 18197 + }, + { + "epoch": 0.7509284476355533, + "grad_norm": 2.5739111561021604, + "learning_rate": 4.621894945991585e-07, + "loss": 0.4915, + "step": 18198 + }, + { + "epoch": 0.7509697119749112, + "grad_norm": 4.657595892959372, + "learning_rate": 4.620447564686305e-07, + "loss": 0.5499, + "step": 18199 + }, + { + "epoch": 0.7510109763142692, + "grad_norm": 2.8106737841629004, + "learning_rate": 4.6190003687857963e-07, + "loss": 0.5215, + "step": 18200 + }, + { + "epoch": 0.7510522406536272, + "grad_norm": 2.7424740309104787, + "learning_rate": 4.617553358315912e-07, + "loss": 0.5056, + "step": 18201 + }, + { + "epoch": 0.7510935049929851, + "grad_norm": 7.315106691915883, + "learning_rate": 4.616106533302501e-07, + "loss": 0.5579, + "step": 18202 + }, + { + "epoch": 0.751134769332343, + "grad_norm": 2.5707874398203177, + "learning_rate": 4.614659893771398e-07, + "loss": 0.4709, + "step": 18203 + }, + { + "epoch": 0.7511760336717009, + "grad_norm": 1.8469066842968165, + "learning_rate": 4.6132134397484504e-07, + "loss": 0.5342, + "step": 18204 + }, + { + "epoch": 0.7512172980110589, + "grad_norm": 2.996755688788118, + "learning_rate": 4.611767171259496e-07, + "loss": 0.4993, + "step": 18205 + }, + { + "epoch": 0.7512585623504168, + "grad_norm": 2.381365795185051, + "learning_rate": 4.6103210883303707e-07, + "loss": 0.5052, + "step": 18206 + }, + { + "epoch": 0.7512998266897747, + "grad_norm": 4.718628146946462, + "learning_rate": 4.608875190986901e-07, + "loss": 0.4805, + "step": 18207 + }, + { + "epoch": 0.7513410910291326, + "grad_norm": 2.45897148540059, + "learning_rate": 4.607429479254912e-07, + "loss": 0.4978, + "step": 18208 + }, + { + "epoch": 0.7513823553684905, + "grad_norm": 2.340461335381003, + "learning_rate": 4.605983953160229e-07, + "loss": 0.473, + "step": 18209 + }, + { + "epoch": 0.7514236197078484, + "grad_norm": 3.9891586333742683, + "learning_rate": 4.6045386127286754e-07, + "loss": 0.5054, + "step": 18210 + }, + { + "epoch": 0.7514648840472063, + "grad_norm": 2.8269490358335014, + "learning_rate": 4.603093457986073e-07, + "loss": 0.5906, + "step": 18211 + }, + { + "epoch": 0.7515061483865644, + "grad_norm": 3.449226304114601, + "learning_rate": 4.601648488958224e-07, + "loss": 0.467, + "step": 18212 + }, + { + "epoch": 0.7515474127259223, + "grad_norm": 4.368144586841004, + "learning_rate": 4.600203705670948e-07, + "loss": 0.5454, + "step": 18213 + }, + { + "epoch": 0.7515886770652802, + "grad_norm": 3.0219947854622777, + "learning_rate": 4.5987591081500525e-07, + "loss": 0.5461, + "step": 18214 + }, + { + "epoch": 0.7516299414046381, + "grad_norm": 2.2466287794459894, + "learning_rate": 4.5973146964213354e-07, + "loss": 0.4862, + "step": 18215 + }, + { + "epoch": 0.751671205743996, + "grad_norm": 9.322341080489105, + "learning_rate": 4.5958704705106024e-07, + "loss": 0.4764, + "step": 18216 + }, + { + "epoch": 0.751712470083354, + "grad_norm": 3.21488749432525, + "learning_rate": 4.5944264304436483e-07, + "loss": 0.4827, + "step": 18217 + }, + { + "epoch": 0.7517537344227119, + "grad_norm": 2.5064506159948463, + "learning_rate": 
4.592982576246276e-07, + "loss": 0.5106, + "step": 18218 + }, + { + "epoch": 0.7517949987620698, + "grad_norm": 4.456537025625161, + "learning_rate": 4.5915389079442623e-07, + "loss": 0.5789, + "step": 18219 + }, + { + "epoch": 0.7518362631014277, + "grad_norm": 8.654516876592218, + "learning_rate": 4.590095425563407e-07, + "loss": 0.529, + "step": 18220 + }, + { + "epoch": 0.7518775274407856, + "grad_norm": 5.172561817254215, + "learning_rate": 4.588652129129485e-07, + "loss": 0.4569, + "step": 18221 + }, + { + "epoch": 0.7519187917801436, + "grad_norm": 6.99580052508014, + "learning_rate": 4.5872090186682796e-07, + "loss": 0.5207, + "step": 18222 + }, + { + "epoch": 0.7519600561195016, + "grad_norm": 2.4226006006251275, + "learning_rate": 4.5857660942055763e-07, + "loss": 0.4992, + "step": 18223 + }, + { + "epoch": 0.7520013204588595, + "grad_norm": 2.440234102281647, + "learning_rate": 4.584323355767137e-07, + "loss": 0.5247, + "step": 18224 + }, + { + "epoch": 0.7520425847982174, + "grad_norm": 2.801472642330278, + "learning_rate": 4.5828808033787385e-07, + "loss": 0.513, + "step": 18225 + }, + { + "epoch": 0.7520838491375753, + "grad_norm": 2.8631982281876174, + "learning_rate": 4.5814384370661496e-07, + "loss": 0.478, + "step": 18226 + }, + { + "epoch": 0.7521251134769332, + "grad_norm": 3.4278074449105866, + "learning_rate": 4.579996256855138e-07, + "loss": 0.5509, + "step": 18227 + }, + { + "epoch": 0.7521663778162911, + "grad_norm": 2.540610700220074, + "learning_rate": 4.578554262771455e-07, + "loss": 0.4773, + "step": 18228 + }, + { + "epoch": 0.7522076421556491, + "grad_norm": 2.4924988869134053, + "learning_rate": 4.577112454840863e-07, + "loss": 0.5228, + "step": 18229 + }, + { + "epoch": 0.752248906495007, + "grad_norm": 23.061415883702686, + "learning_rate": 4.5756708330891174e-07, + "loss": 0.5224, + "step": 18230 + }, + { + "epoch": 0.7522901708343649, + "grad_norm": 4.11746369057495, + "learning_rate": 4.574229397541973e-07, + "loss": 0.521, + "step": 18231 + }, + { + "epoch": 0.7523314351737229, + "grad_norm": 2.686415299391264, + "learning_rate": 4.572788148225168e-07, + "loss": 0.4822, + "step": 18232 + }, + { + "epoch": 0.7523726995130808, + "grad_norm": 4.406355063321901, + "learning_rate": 4.5713470851644557e-07, + "loss": 0.5162, + "step": 18233 + }, + { + "epoch": 0.7524139638524387, + "grad_norm": 4.192542090370077, + "learning_rate": 4.569906208385568e-07, + "loss": 0.5514, + "step": 18234 + }, + { + "epoch": 0.7524552281917967, + "grad_norm": 4.507038479736381, + "learning_rate": 4.5684655179142534e-07, + "loss": 0.5518, + "step": 18235 + }, + { + "epoch": 0.7524964925311546, + "grad_norm": 2.8754665152357486, + "learning_rate": 4.567025013776235e-07, + "loss": 0.4959, + "step": 18236 + }, + { + "epoch": 0.7525377568705125, + "grad_norm": 2.1619720267127387, + "learning_rate": 4.565584695997249e-07, + "loss": 0.5263, + "step": 18237 + }, + { + "epoch": 0.7525790212098704, + "grad_norm": 4.183065855113781, + "learning_rate": 4.564144564603024e-07, + "loss": 0.5075, + "step": 18238 + }, + { + "epoch": 0.7526202855492283, + "grad_norm": 2.2451614684322494, + "learning_rate": 4.562704619619289e-07, + "loss": 0.5094, + "step": 18239 + }, + { + "epoch": 0.7526615498885862, + "grad_norm": 20.753122817317173, + "learning_rate": 4.561264861071754e-07, + "loss": 0.5035, + "step": 18240 + }, + { + "epoch": 0.7527028142279442, + "grad_norm": 2.523772703224391, + "learning_rate": 4.559825288986141e-07, + "loss": 0.4834, + "step": 18241 + }, + { + "epoch": 
0.7527440785673022, + "grad_norm": 3.300756150155417, + "learning_rate": 4.5583859033881673e-07, + "loss": 0.5318, + "step": 18242 + }, + { + "epoch": 0.7527853429066601, + "grad_norm": 2.9849560470612757, + "learning_rate": 4.556946704303547e-07, + "loss": 0.5105, + "step": 18243 + }, + { + "epoch": 0.752826607246018, + "grad_norm": 4.752794801193345, + "learning_rate": 4.555507691757977e-07, + "loss": 0.5257, + "step": 18244 + }, + { + "epoch": 0.7528678715853759, + "grad_norm": 5.070313261060459, + "learning_rate": 4.554068865777169e-07, + "loss": 0.4986, + "step": 18245 + }, + { + "epoch": 0.7529091359247339, + "grad_norm": 4.4531611603380155, + "learning_rate": 4.5526302263868255e-07, + "loss": 0.4855, + "step": 18246 + }, + { + "epoch": 0.7529504002640918, + "grad_norm": 4.213040879312274, + "learning_rate": 4.5511917736126383e-07, + "loss": 0.5236, + "step": 18247 + }, + { + "epoch": 0.7529916646034497, + "grad_norm": 5.8762989000281705, + "learning_rate": 4.54975350748031e-07, + "loss": 0.4981, + "step": 18248 + }, + { + "epoch": 0.7530329289428076, + "grad_norm": 1.6642978202175125, + "learning_rate": 4.5483154280155224e-07, + "loss": 0.4978, + "step": 18249 + }, + { + "epoch": 0.7530741932821655, + "grad_norm": 2.885384255363288, + "learning_rate": 4.546877535243966e-07, + "loss": 0.5165, + "step": 18250 + }, + { + "epoch": 0.7531154576215234, + "grad_norm": 3.20693142533663, + "learning_rate": 4.5454398291913317e-07, + "loss": 0.4668, + "step": 18251 + }, + { + "epoch": 0.7531567219608815, + "grad_norm": 3.2674363874998242, + "learning_rate": 4.5440023098832903e-07, + "loss": 0.4957, + "step": 18252 + }, + { + "epoch": 0.7531979863002394, + "grad_norm": 4.023197122273699, + "learning_rate": 4.542564977345525e-07, + "loss": 0.4882, + "step": 18253 + }, + { + "epoch": 0.7532392506395973, + "grad_norm": 2.6865715221596647, + "learning_rate": 4.5411278316037104e-07, + "loss": 0.5938, + "step": 18254 + }, + { + "epoch": 0.7532805149789552, + "grad_norm": 10.147013203077954, + "learning_rate": 4.53969087268352e-07, + "loss": 0.5472, + "step": 18255 + }, + { + "epoch": 0.7533217793183131, + "grad_norm": 4.244007278720317, + "learning_rate": 4.5382541006106145e-07, + "loss": 0.5785, + "step": 18256 + }, + { + "epoch": 0.753363043657671, + "grad_norm": 3.866595478281643, + "learning_rate": 4.5368175154106625e-07, + "loss": 0.5225, + "step": 18257 + }, + { + "epoch": 0.753404307997029, + "grad_norm": 4.475821283484174, + "learning_rate": 4.5353811171093233e-07, + "loss": 0.5525, + "step": 18258 + }, + { + "epoch": 0.7534455723363869, + "grad_norm": 6.609757368434715, + "learning_rate": 4.5339449057322605e-07, + "loss": 0.4816, + "step": 18259 + }, + { + "epoch": 0.7534868366757448, + "grad_norm": 3.060423027325421, + "learning_rate": 4.5325088813051253e-07, + "loss": 0.5409, + "step": 18260 + }, + { + "epoch": 0.7535281010151027, + "grad_norm": 3.282698356771362, + "learning_rate": 4.5310730438535613e-07, + "loss": 0.4695, + "step": 18261 + }, + { + "epoch": 0.7535693653544607, + "grad_norm": 5.964515016613657, + "learning_rate": 4.529637393403222e-07, + "loss": 0.4984, + "step": 18262 + }, + { + "epoch": 0.7536106296938186, + "grad_norm": 2.6690187505122265, + "learning_rate": 4.528201929979752e-07, + "loss": 0.4967, + "step": 18263 + }, + { + "epoch": 0.7536518940331766, + "grad_norm": 2.0215172232777423, + "learning_rate": 4.5267666536087963e-07, + "loss": 0.4222, + "step": 18264 + }, + { + "epoch": 0.7536931583725345, + "grad_norm": 21.41044186505516, + "learning_rate": 
4.5253315643159844e-07, + "loss": 0.5498, + "step": 18265 + }, + { + "epoch": 0.7537344227118924, + "grad_norm": 4.440300349200227, + "learning_rate": 4.5238966621269526e-07, + "loss": 0.5126, + "step": 18266 + }, + { + "epoch": 0.7537756870512503, + "grad_norm": 2.42128258631825, + "learning_rate": 4.522461947067335e-07, + "loss": 0.4918, + "step": 18267 + }, + { + "epoch": 0.7538169513906082, + "grad_norm": 4.843798686177292, + "learning_rate": 4.521027419162762e-07, + "loss": 0.5295, + "step": 18268 + }, + { + "epoch": 0.7538582157299661, + "grad_norm": 2.688057876552099, + "learning_rate": 4.51959307843885e-07, + "loss": 0.4799, + "step": 18269 + }, + { + "epoch": 0.7538994800693241, + "grad_norm": 3.7343261150172653, + "learning_rate": 4.518158924921222e-07, + "loss": 0.5665, + "step": 18270 + }, + { + "epoch": 0.753940744408682, + "grad_norm": 5.388105099066381, + "learning_rate": 4.516724958635502e-07, + "loss": 0.5388, + "step": 18271 + }, + { + "epoch": 0.7539820087480399, + "grad_norm": 2.9301163085081523, + "learning_rate": 4.5152911796072964e-07, + "loss": 0.5339, + "step": 18272 + }, + { + "epoch": 0.7540232730873979, + "grad_norm": 3.120501442717414, + "learning_rate": 4.5138575878622216e-07, + "loss": 0.4754, + "step": 18273 + }, + { + "epoch": 0.7540645374267558, + "grad_norm": 6.371241230680294, + "learning_rate": 4.5124241834258787e-07, + "loss": 0.532, + "step": 18274 + }, + { + "epoch": 0.7541058017661137, + "grad_norm": 2.666901822755377, + "learning_rate": 4.510990966323877e-07, + "loss": 0.4643, + "step": 18275 + }, + { + "epoch": 0.7541470661054717, + "grad_norm": 2.752829022722498, + "learning_rate": 4.50955793658182e-07, + "loss": 0.5183, + "step": 18276 + }, + { + "epoch": 0.7541883304448296, + "grad_norm": 4.040448830585028, + "learning_rate": 4.508125094225297e-07, + "loss": 0.5212, + "step": 18277 + }, + { + "epoch": 0.7542295947841875, + "grad_norm": 2.8053582653335822, + "learning_rate": 4.5066924392799046e-07, + "loss": 0.5024, + "step": 18278 + }, + { + "epoch": 0.7542708591235454, + "grad_norm": 2.686227341288635, + "learning_rate": 4.5052599717712373e-07, + "loss": 0.5225, + "step": 18279 + }, + { + "epoch": 0.7543121234629033, + "grad_norm": 7.663117223455045, + "learning_rate": 4.503827691724884e-07, + "loss": 0.4826, + "step": 18280 + }, + { + "epoch": 0.7543533878022612, + "grad_norm": 3.2775467035048447, + "learning_rate": 4.5023955991664225e-07, + "loss": 0.5405, + "step": 18281 + }, + { + "epoch": 0.7543946521416192, + "grad_norm": 2.135505307794422, + "learning_rate": 4.5009636941214357e-07, + "loss": 0.5354, + "step": 18282 + }, + { + "epoch": 0.7544359164809772, + "grad_norm": 2.197791313528955, + "learning_rate": 4.4995319766155013e-07, + "loss": 0.49, + "step": 18283 + }, + { + "epoch": 0.7544771808203351, + "grad_norm": 3.3988217339036537, + "learning_rate": 4.498100446674199e-07, + "loss": 0.5049, + "step": 18284 + }, + { + "epoch": 0.754518445159693, + "grad_norm": 2.1313606685663187, + "learning_rate": 4.496669104323091e-07, + "loss": 0.5211, + "step": 18285 + }, + { + "epoch": 0.7545597094990509, + "grad_norm": 4.878396738272828, + "learning_rate": 4.4952379495877514e-07, + "loss": 0.5497, + "step": 18286 + }, + { + "epoch": 0.7546009738384089, + "grad_norm": 13.58299567780041, + "learning_rate": 4.4938069824937366e-07, + "loss": 0.5297, + "step": 18287 + }, + { + "epoch": 0.7546422381777668, + "grad_norm": 2.9270225886121914, + "learning_rate": 4.492376203066612e-07, + "loss": 0.4613, + "step": 18288 + }, + { + "epoch": 
0.7546835025171247, + "grad_norm": 3.4664129095748355, + "learning_rate": 4.490945611331939e-07, + "loss": 0.5181, + "step": 18289 + }, + { + "epoch": 0.7547247668564826, + "grad_norm": 3.0335105005728624, + "learning_rate": 4.4895152073152626e-07, + "loss": 0.5451, + "step": 18290 + }, + { + "epoch": 0.7547660311958405, + "grad_norm": 4.265443339673657, + "learning_rate": 4.488084991042136e-07, + "loss": 0.4988, + "step": 18291 + }, + { + "epoch": 0.7548072955351984, + "grad_norm": 5.838196690451228, + "learning_rate": 4.4866549625381154e-07, + "loss": 0.4988, + "step": 18292 + }, + { + "epoch": 0.7548485598745565, + "grad_norm": 4.072517755931082, + "learning_rate": 4.485225121828731e-07, + "loss": 0.5324, + "step": 18293 + }, + { + "epoch": 0.7548898242139144, + "grad_norm": 4.578836334767245, + "learning_rate": 4.483795468939531e-07, + "loss": 0.5061, + "step": 18294 + }, + { + "epoch": 0.7549310885532723, + "grad_norm": 3.3607157728973664, + "learning_rate": 4.482366003896052e-07, + "loss": 0.478, + "step": 18295 + }, + { + "epoch": 0.7549723528926302, + "grad_norm": 4.690002184722886, + "learning_rate": 4.480936726723829e-07, + "loss": 0.5094, + "step": 18296 + }, + { + "epoch": 0.7550136172319881, + "grad_norm": 2.822814909282797, + "learning_rate": 4.4795076374483885e-07, + "loss": 0.5375, + "step": 18297 + }, + { + "epoch": 0.755054881571346, + "grad_norm": 4.577884029045487, + "learning_rate": 4.47807873609526e-07, + "loss": 0.5451, + "step": 18298 + }, + { + "epoch": 0.755096145910704, + "grad_norm": 4.563273151963415, + "learning_rate": 4.476650022689971e-07, + "loss": 0.5391, + "step": 18299 + }, + { + "epoch": 0.7551374102500619, + "grad_norm": 3.390607700556953, + "learning_rate": 4.4752214972580336e-07, + "loss": 0.5673, + "step": 18300 + }, + { + "epoch": 0.7551786745894198, + "grad_norm": 3.5404335777994835, + "learning_rate": 4.473793159824974e-07, + "loss": 0.4738, + "step": 18301 + }, + { + "epoch": 0.7552199389287777, + "grad_norm": 3.451686586737736, + "learning_rate": 4.472365010416296e-07, + "loss": 0.5232, + "step": 18302 + }, + { + "epoch": 0.7552612032681357, + "grad_norm": 5.876424029861692, + "learning_rate": 4.470937049057515e-07, + "loss": 0.5159, + "step": 18303 + }, + { + "epoch": 0.7553024676074936, + "grad_norm": 3.6113432909825804, + "learning_rate": 4.469509275774138e-07, + "loss": 0.5139, + "step": 18304 + }, + { + "epoch": 0.7553437319468516, + "grad_norm": 42.84729152344658, + "learning_rate": 4.4680816905916736e-07, + "loss": 0.5023, + "step": 18305 + }, + { + "epoch": 0.7553849962862095, + "grad_norm": 2.778317934049907, + "learning_rate": 4.4666542935356124e-07, + "loss": 0.5046, + "step": 18306 + }, + { + "epoch": 0.7554262606255674, + "grad_norm": 9.63287583869241, + "learning_rate": 4.465227084631455e-07, + "loss": 0.4926, + "step": 18307 + }, + { + "epoch": 0.7554675249649253, + "grad_norm": 2.600507806888421, + "learning_rate": 4.4638000639046967e-07, + "loss": 0.576, + "step": 18308 + }, + { + "epoch": 0.7555087893042832, + "grad_norm": 4.206192972529194, + "learning_rate": 4.4623732313808313e-07, + "loss": 0.4551, + "step": 18309 + }, + { + "epoch": 0.7555500536436411, + "grad_norm": 2.634135883300952, + "learning_rate": 4.4609465870853353e-07, + "loss": 0.5177, + "step": 18310 + }, + { + "epoch": 0.7555913179829991, + "grad_norm": 2.8779789108250964, + "learning_rate": 4.4595201310437007e-07, + "loss": 0.4953, + "step": 18311 + }, + { + "epoch": 0.755632582322357, + "grad_norm": 5.45671639310801, + "learning_rate": 4.4580938632814076e-07, 
+ "loss": 0.4869, + "step": 18312 + }, + { + "epoch": 0.755673846661715, + "grad_norm": 7.0554141796522485, + "learning_rate": 4.4566677838239303e-07, + "loss": 0.4608, + "step": 18313 + }, + { + "epoch": 0.7557151110010729, + "grad_norm": 3.721097254128942, + "learning_rate": 4.455241892696737e-07, + "loss": 0.5532, + "step": 18314 + }, + { + "epoch": 0.7557563753404308, + "grad_norm": 6.205634151494793, + "learning_rate": 4.4538161899253015e-07, + "loss": 0.5063, + "step": 18315 + }, + { + "epoch": 0.7557976396797887, + "grad_norm": 7.179383686799087, + "learning_rate": 4.452390675535092e-07, + "loss": 0.4727, + "step": 18316 + }, + { + "epoch": 0.7558389040191467, + "grad_norm": 5.286791802190283, + "learning_rate": 4.4509653495515756e-07, + "loss": 0.5469, + "step": 18317 + }, + { + "epoch": 0.7558801683585046, + "grad_norm": 2.3234783508222465, + "learning_rate": 4.449540212000204e-07, + "loss": 0.4699, + "step": 18318 + }, + { + "epoch": 0.7559214326978625, + "grad_norm": 2.18284078625221, + "learning_rate": 4.4481152629064374e-07, + "loss": 0.5051, + "step": 18319 + }, + { + "epoch": 0.7559626970372204, + "grad_norm": 2.51964669310468, + "learning_rate": 4.4466905022957284e-07, + "loss": 0.5248, + "step": 18320 + }, + { + "epoch": 0.7560039613765783, + "grad_norm": 3.5481771713979957, + "learning_rate": 4.445265930193531e-07, + "loss": 0.4975, + "step": 18321 + }, + { + "epoch": 0.7560452257159362, + "grad_norm": 4.521919143692519, + "learning_rate": 4.443841546625284e-07, + "loss": 0.4494, + "step": 18322 + }, + { + "epoch": 0.7560864900552943, + "grad_norm": 3.2998812702766345, + "learning_rate": 4.4424173516164354e-07, + "loss": 0.5054, + "step": 18323 + }, + { + "epoch": 0.7561277543946522, + "grad_norm": 5.284935996443666, + "learning_rate": 4.440993345192424e-07, + "loss": 0.5321, + "step": 18324 + }, + { + "epoch": 0.7561690187340101, + "grad_norm": 3.802625496851248, + "learning_rate": 4.43956952737869e-07, + "loss": 0.4846, + "step": 18325 + }, + { + "epoch": 0.756210283073368, + "grad_norm": 3.86760687034498, + "learning_rate": 4.438145898200662e-07, + "loss": 0.4836, + "step": 18326 + }, + { + "epoch": 0.7562515474127259, + "grad_norm": 2.6979467902205863, + "learning_rate": 4.436722457683765e-07, + "loss": 0.5234, + "step": 18327 + }, + { + "epoch": 0.7562928117520838, + "grad_norm": 2.8025228049366415, + "learning_rate": 4.4352992058534315e-07, + "loss": 0.5501, + "step": 18328 + }, + { + "epoch": 0.7563340760914418, + "grad_norm": 2.4990526168825795, + "learning_rate": 4.433876142735087e-07, + "loss": 0.4829, + "step": 18329 + }, + { + "epoch": 0.7563753404307997, + "grad_norm": 3.019010224393489, + "learning_rate": 4.4324532683541434e-07, + "loss": 0.5026, + "step": 18330 + }, + { + "epoch": 0.7564166047701576, + "grad_norm": 4.519105921673834, + "learning_rate": 4.431030582736019e-07, + "loss": 0.5155, + "step": 18331 + }, + { + "epoch": 0.7564578691095155, + "grad_norm": 2.436505749598674, + "learning_rate": 4.4296080859061274e-07, + "loss": 0.4759, + "step": 18332 + }, + { + "epoch": 0.7564991334488734, + "grad_norm": 2.713129423170892, + "learning_rate": 4.4281857778898855e-07, + "loss": 0.5251, + "step": 18333 + }, + { + "epoch": 0.7565403977882315, + "grad_norm": 5.506265099113171, + "learning_rate": 4.4267636587126845e-07, + "loss": 0.5323, + "step": 18334 + }, + { + "epoch": 0.7565816621275894, + "grad_norm": 3.0375933396819534, + "learning_rate": 4.4253417283999365e-07, + "loss": 0.5369, + "step": 18335 + }, + { + "epoch": 0.7566229264669473, + "grad_norm": 
7.524770990197748, + "learning_rate": 4.4239199869770387e-07, + "loss": 0.5258, + "step": 18336 + }, + { + "epoch": 0.7566641908063052, + "grad_norm": 2.6884458658837156, + "learning_rate": 4.4224984344693926e-07, + "loss": 0.5421, + "step": 18337 + }, + { + "epoch": 0.7567054551456631, + "grad_norm": 3.6377329375122924, + "learning_rate": 4.4210770709023847e-07, + "loss": 0.4945, + "step": 18338 + }, + { + "epoch": 0.756746719485021, + "grad_norm": 5.160165436001765, + "learning_rate": 4.4196558963013996e-07, + "loss": 0.5362, + "step": 18339 + }, + { + "epoch": 0.756787983824379, + "grad_norm": 3.2778754689798917, + "learning_rate": 4.418234910691828e-07, + "loss": 0.4939, + "step": 18340 + }, + { + "epoch": 0.7568292481637369, + "grad_norm": 3.7450215926270385, + "learning_rate": 4.4168141140990545e-07, + "loss": 0.5244, + "step": 18341 + }, + { + "epoch": 0.7568705125030948, + "grad_norm": 2.3960109050375986, + "learning_rate": 4.41539350654846e-07, + "loss": 0.5056, + "step": 18342 + }, + { + "epoch": 0.7569117768424527, + "grad_norm": 3.358794195785139, + "learning_rate": 4.4139730880654096e-07, + "loss": 0.4906, + "step": 18343 + }, + { + "epoch": 0.7569530411818107, + "grad_norm": 2.9739764826115636, + "learning_rate": 4.4125528586752825e-07, + "loss": 0.493, + "step": 18344 + }, + { + "epoch": 0.7569943055211686, + "grad_norm": 2.9240747793960873, + "learning_rate": 4.411132818403448e-07, + "loss": 0.5247, + "step": 18345 + }, + { + "epoch": 0.7570355698605266, + "grad_norm": 3.482330181344495, + "learning_rate": 4.4097129672752756e-07, + "loss": 0.5395, + "step": 18346 + }, + { + "epoch": 0.7570768341998845, + "grad_norm": 3.4200668638059466, + "learning_rate": 4.408293305316117e-07, + "loss": 0.4433, + "step": 18347 + }, + { + "epoch": 0.7571180985392424, + "grad_norm": 6.43183056674756, + "learning_rate": 4.4068738325513357e-07, + "loss": 0.4773, + "step": 18348 + }, + { + "epoch": 0.7571593628786003, + "grad_norm": 3.0825266326306133, + "learning_rate": 4.405454549006294e-07, + "loss": 0.495, + "step": 18349 + }, + { + "epoch": 0.7572006272179582, + "grad_norm": 2.2052180703799675, + "learning_rate": 4.404035454706332e-07, + "loss": 0.458, + "step": 18350 + }, + { + "epoch": 0.7572418915573161, + "grad_norm": 2.2759616036864525, + "learning_rate": 4.402616549676808e-07, + "loss": 0.5789, + "step": 18351 + }, + { + "epoch": 0.757283155896674, + "grad_norm": 2.635903463743602, + "learning_rate": 4.401197833943059e-07, + "loss": 0.4612, + "step": 18352 + }, + { + "epoch": 0.757324420236032, + "grad_norm": 3.8142875136335395, + "learning_rate": 4.399779307530431e-07, + "loss": 0.5116, + "step": 18353 + }, + { + "epoch": 0.75736568457539, + "grad_norm": 4.089069717154931, + "learning_rate": 4.3983609704642665e-07, + "loss": 0.5309, + "step": 18354 + }, + { + "epoch": 0.7574069489147479, + "grad_norm": 2.1111895907829887, + "learning_rate": 4.3969428227698915e-07, + "loss": 0.4639, + "step": 18355 + }, + { + "epoch": 0.7574482132541058, + "grad_norm": 2.340446083396095, + "learning_rate": 4.395524864472643e-07, + "loss": 0.5664, + "step": 18356 + }, + { + "epoch": 0.7574894775934637, + "grad_norm": 2.8946540101910028, + "learning_rate": 4.3941070955978493e-07, + "loss": 0.5053, + "step": 18357 + }, + { + "epoch": 0.7575307419328217, + "grad_norm": 2.724063685065346, + "learning_rate": 4.3926895161708377e-07, + "loss": 0.5256, + "step": 18358 + }, + { + "epoch": 0.7575720062721796, + "grad_norm": 2.6696632834424348, + "learning_rate": 4.391272126216923e-07, + "loss": 0.4919, + 
"step": 18359 + }, + { + "epoch": 0.7576132706115375, + "grad_norm": 2.3055899842513945, + "learning_rate": 4.3898549257614275e-07, + "loss": 0.4945, + "step": 18360 + }, + { + "epoch": 0.7576545349508954, + "grad_norm": 3.4324806091525275, + "learning_rate": 4.388437914829665e-07, + "loss": 0.5824, + "step": 18361 + }, + { + "epoch": 0.7576957992902533, + "grad_norm": 22.984108605300843, + "learning_rate": 4.3870210934469525e-07, + "loss": 0.5467, + "step": 18362 + }, + { + "epoch": 0.7577370636296112, + "grad_norm": 2.5931745803665485, + "learning_rate": 4.3856044616385874e-07, + "loss": 0.506, + "step": 18363 + }, + { + "epoch": 0.7577783279689693, + "grad_norm": 3.0214036173886982, + "learning_rate": 4.3841880194298804e-07, + "loss": 0.5372, + "step": 18364 + }, + { + "epoch": 0.7578195923083272, + "grad_norm": 2.4653819474014123, + "learning_rate": 4.3827717668461375e-07, + "loss": 0.5278, + "step": 18365 + }, + { + "epoch": 0.7578608566476851, + "grad_norm": 2.5649012572854595, + "learning_rate": 4.3813557039126453e-07, + "loss": 0.5055, + "step": 18366 + }, + { + "epoch": 0.757902120987043, + "grad_norm": 2.6924368689589193, + "learning_rate": 4.3799398306547095e-07, + "loss": 0.4848, + "step": 18367 + }, + { + "epoch": 0.7579433853264009, + "grad_norm": 4.300826901165558, + "learning_rate": 4.3785241470976087e-07, + "loss": 0.4619, + "step": 18368 + }, + { + "epoch": 0.7579846496657588, + "grad_norm": 2.6569086509824986, + "learning_rate": 4.37710865326664e-07, + "loss": 0.5018, + "step": 18369 + }, + { + "epoch": 0.7580259140051168, + "grad_norm": 2.341338660159331, + "learning_rate": 4.3756933491870886e-07, + "loss": 0.5718, + "step": 18370 + }, + { + "epoch": 0.7580671783444747, + "grad_norm": 8.158459991334505, + "learning_rate": 4.3742782348842287e-07, + "loss": 0.5328, + "step": 18371 + }, + { + "epoch": 0.7581084426838326, + "grad_norm": 7.12292549663596, + "learning_rate": 4.3728633103833396e-07, + "loss": 0.4936, + "step": 18372 + }, + { + "epoch": 0.7581497070231905, + "grad_norm": 3.3363217834427057, + "learning_rate": 4.371448575709697e-07, + "loss": 0.5341, + "step": 18373 + }, + { + "epoch": 0.7581909713625485, + "grad_norm": 2.938039448419022, + "learning_rate": 4.3700340308885774e-07, + "loss": 0.5251, + "step": 18374 + }, + { + "epoch": 0.7582322357019065, + "grad_norm": 2.1243520781481817, + "learning_rate": 4.368619675945236e-07, + "loss": 0.5223, + "step": 18375 + }, + { + "epoch": 0.7582735000412644, + "grad_norm": 7.465791678455923, + "learning_rate": 4.3672055109049424e-07, + "loss": 0.4861, + "step": 18376 + }, + { + "epoch": 0.7583147643806223, + "grad_norm": 4.462104392996428, + "learning_rate": 4.365791535792959e-07, + "loss": 0.4824, + "step": 18377 + }, + { + "epoch": 0.7583560287199802, + "grad_norm": 3.12274833821147, + "learning_rate": 4.364377750634545e-07, + "loss": 0.5067, + "step": 18378 + }, + { + "epoch": 0.7583972930593381, + "grad_norm": 1.8397268504226385, + "learning_rate": 4.36296415545495e-07, + "loss": 0.5016, + "step": 18379 + }, + { + "epoch": 0.758438557398696, + "grad_norm": 2.333951368112357, + "learning_rate": 4.3615507502794217e-07, + "loss": 0.494, + "step": 18380 + }, + { + "epoch": 0.758479821738054, + "grad_norm": 3.1371496700227905, + "learning_rate": 4.3601375351332103e-07, + "loss": 0.4988, + "step": 18381 + }, + { + "epoch": 0.7585210860774119, + "grad_norm": 4.323285771941499, + "learning_rate": 4.358724510041559e-07, + "loss": 0.5226, + "step": 18382 + }, + { + "epoch": 0.7585623504167698, + "grad_norm": 
3.1826898160089856, + "learning_rate": 4.3573116750297134e-07, + "loss": 0.5126, + "step": 18383 + }, + { + "epoch": 0.7586036147561278, + "grad_norm": 2.4463278237735953, + "learning_rate": 4.355899030122901e-07, + "loss": 0.4997, + "step": 18384 + }, + { + "epoch": 0.7586448790954857, + "grad_norm": 2.5098724071072436, + "learning_rate": 4.354486575346358e-07, + "loss": 0.4803, + "step": 18385 + }, + { + "epoch": 0.7586861434348436, + "grad_norm": 6.758452018293524, + "learning_rate": 4.353074310725317e-07, + "loss": 0.5208, + "step": 18386 + }, + { + "epoch": 0.7587274077742016, + "grad_norm": 7.779444795231986, + "learning_rate": 4.351662236285007e-07, + "loss": 0.5188, + "step": 18387 + }, + { + "epoch": 0.7587686721135595, + "grad_norm": 2.4066225772363308, + "learning_rate": 4.350250352050643e-07, + "loss": 0.5237, + "step": 18388 + }, + { + "epoch": 0.7588099364529174, + "grad_norm": 3.918310054642312, + "learning_rate": 4.348838658047449e-07, + "loss": 0.5135, + "step": 18389 + }, + { + "epoch": 0.7588512007922753, + "grad_norm": 3.8324474963927173, + "learning_rate": 4.3474271543006466e-07, + "loss": 0.5369, + "step": 18390 + }, + { + "epoch": 0.7588924651316332, + "grad_norm": 2.6424435367055823, + "learning_rate": 4.3460158408354446e-07, + "loss": 0.5542, + "step": 18391 + }, + { + "epoch": 0.7589337294709911, + "grad_norm": 6.208569166663342, + "learning_rate": 4.3446047176770477e-07, + "loss": 0.5031, + "step": 18392 + }, + { + "epoch": 0.758974993810349, + "grad_norm": 2.272908288798669, + "learning_rate": 4.343193784850664e-07, + "loss": 0.4813, + "step": 18393 + }, + { + "epoch": 0.759016258149707, + "grad_norm": 6.7496513495741075, + "learning_rate": 4.3417830423815007e-07, + "loss": 0.4874, + "step": 18394 + }, + { + "epoch": 0.759057522489065, + "grad_norm": 8.88562893708234, + "learning_rate": 4.340372490294759e-07, + "loss": 0.4319, + "step": 18395 + }, + { + "epoch": 0.7590987868284229, + "grad_norm": 2.6246777165884403, + "learning_rate": 4.3389621286156264e-07, + "loss": 0.5482, + "step": 18396 + }, + { + "epoch": 0.7591400511677808, + "grad_norm": 8.443992266057014, + "learning_rate": 4.337551957369299e-07, + "loss": 0.4983, + "step": 18397 + }, + { + "epoch": 0.7591813155071387, + "grad_norm": 2.0958361233408778, + "learning_rate": 4.336141976580969e-07, + "loss": 0.5189, + "step": 18398 + }, + { + "epoch": 0.7592225798464967, + "grad_norm": 5.693238913694608, + "learning_rate": 4.3347321862758233e-07, + "loss": 0.5079, + "step": 18399 + }, + { + "epoch": 0.7592638441858546, + "grad_norm": 2.9924112830037806, + "learning_rate": 4.3333225864790367e-07, + "loss": 0.4953, + "step": 18400 + }, + { + "epoch": 0.7593051085252125, + "grad_norm": 3.076100131588363, + "learning_rate": 4.331913177215792e-07, + "loss": 0.5075, + "step": 18401 + }, + { + "epoch": 0.7593463728645704, + "grad_norm": 3.324766065370057, + "learning_rate": 4.3305039585112664e-07, + "loss": 0.5259, + "step": 18402 + }, + { + "epoch": 0.7593876372039283, + "grad_norm": 4.778303920188395, + "learning_rate": 4.329094930390636e-07, + "loss": 0.5329, + "step": 18403 + }, + { + "epoch": 0.7594289015432862, + "grad_norm": 3.4512726441373953, + "learning_rate": 4.3276860928790646e-07, + "loss": 0.4884, + "step": 18404 + }, + { + "epoch": 0.7594701658826443, + "grad_norm": 6.688080189446915, + "learning_rate": 4.326277446001711e-07, + "loss": 0.5603, + "step": 18405 + }, + { + "epoch": 0.7595114302220022, + "grad_norm": 13.177752525418061, + "learning_rate": 4.324868989783746e-07, + "loss": 0.5085, + 
"step": 18406 + }, + { + "epoch": 0.7595526945613601, + "grad_norm": 3.1952682062600606, + "learning_rate": 4.323460724250331e-07, + "loss": 0.4703, + "step": 18407 + }, + { + "epoch": 0.759593958900718, + "grad_norm": 4.995506777069658, + "learning_rate": 4.3220526494266106e-07, + "loss": 0.5264, + "step": 18408 + }, + { + "epoch": 0.7596352232400759, + "grad_norm": 3.5667597330381757, + "learning_rate": 4.320644765337741e-07, + "loss": 0.4973, + "step": 18409 + }, + { + "epoch": 0.7596764875794338, + "grad_norm": 2.352303390861348, + "learning_rate": 4.319237072008871e-07, + "loss": 0.5382, + "step": 18410 + }, + { + "epoch": 0.7597177519187918, + "grad_norm": 2.4225471888000207, + "learning_rate": 4.3178295694651527e-07, + "loss": 0.5352, + "step": 18411 + }, + { + "epoch": 0.7597590162581497, + "grad_norm": 2.784786732039645, + "learning_rate": 4.3164222577317166e-07, + "loss": 0.5143, + "step": 18412 + }, + { + "epoch": 0.7598002805975076, + "grad_norm": 3.781073439484565, + "learning_rate": 4.315015136833704e-07, + "loss": 0.473, + "step": 18413 + }, + { + "epoch": 0.7598415449368655, + "grad_norm": 2.7487788376976376, + "learning_rate": 4.313608206796251e-07, + "loss": 0.5391, + "step": 18414 + }, + { + "epoch": 0.7598828092762235, + "grad_norm": 3.4493174827792177, + "learning_rate": 4.3122014676444937e-07, + "loss": 0.4494, + "step": 18415 + }, + { + "epoch": 0.7599240736155815, + "grad_norm": 2.6000019270527375, + "learning_rate": 4.31079491940355e-07, + "loss": 0.5527, + "step": 18416 + }, + { + "epoch": 0.7599653379549394, + "grad_norm": 4.575393247097707, + "learning_rate": 4.309388562098553e-07, + "loss": 0.5457, + "step": 18417 + }, + { + "epoch": 0.7600066022942973, + "grad_norm": 4.453031136901688, + "learning_rate": 4.3079823957546173e-07, + "loss": 0.5205, + "step": 18418 + }, + { + "epoch": 0.7600478666336552, + "grad_norm": 5.958075889355969, + "learning_rate": 4.306576420396863e-07, + "loss": 0.5432, + "step": 18419 + }, + { + "epoch": 0.7600891309730131, + "grad_norm": 2.3132991852285705, + "learning_rate": 4.3051706360504083e-07, + "loss": 0.4729, + "step": 18420 + }, + { + "epoch": 0.760130395312371, + "grad_norm": 4.0348452194050095, + "learning_rate": 4.303765042740356e-07, + "loss": 0.5367, + "step": 18421 + }, + { + "epoch": 0.760171659651729, + "grad_norm": 2.5250291361178903, + "learning_rate": 4.302359640491818e-07, + "loss": 0.5697, + "step": 18422 + }, + { + "epoch": 0.7602129239910869, + "grad_norm": 2.1814468936528004, + "learning_rate": 4.3009544293298985e-07, + "loss": 0.5607, + "step": 18423 + }, + { + "epoch": 0.7602541883304448, + "grad_norm": 3.020608026588458, + "learning_rate": 4.299549409279702e-07, + "loss": 0.5376, + "step": 18424 + }, + { + "epoch": 0.7602954526698028, + "grad_norm": 2.022417719186217, + "learning_rate": 4.298144580366317e-07, + "loss": 0.4865, + "step": 18425 + }, + { + "epoch": 0.7603367170091607, + "grad_norm": 3.7419114175826076, + "learning_rate": 4.296739942614842e-07, + "loss": 0.5051, + "step": 18426 + }, + { + "epoch": 0.7603779813485186, + "grad_norm": 4.029541913086834, + "learning_rate": 4.295335496050373e-07, + "loss": 0.4981, + "step": 18427 + }, + { + "epoch": 0.7604192456878766, + "grad_norm": 3.773454492173462, + "learning_rate": 4.293931240697985e-07, + "loss": 0.5616, + "step": 18428 + }, + { + "epoch": 0.7604605100272345, + "grad_norm": 2.6865687505924662, + "learning_rate": 4.292527176582766e-07, + "loss": 0.4983, + "step": 18429 + }, + { + "epoch": 0.7605017743665924, + "grad_norm": 3.94883704398053, + 
"learning_rate": 4.291123303729805e-07, + "loss": 0.5697, + "step": 18430 + }, + { + "epoch": 0.7605430387059503, + "grad_norm": 5.631422869621043, + "learning_rate": 4.2897196221641653e-07, + "loss": 0.464, + "step": 18431 + }, + { + "epoch": 0.7605843030453082, + "grad_norm": 2.245354087760596, + "learning_rate": 4.2883161319109316e-07, + "loss": 0.5085, + "step": 18432 + }, + { + "epoch": 0.7606255673846661, + "grad_norm": 11.481330904851784, + "learning_rate": 4.286912832995162e-07, + "loss": 0.5607, + "step": 18433 + }, + { + "epoch": 0.760666831724024, + "grad_norm": 4.3750502421733986, + "learning_rate": 4.2855097254419314e-07, + "loss": 0.492, + "step": 18434 + }, + { + "epoch": 0.7607080960633821, + "grad_norm": 2.8960177888141296, + "learning_rate": 4.284106809276298e-07, + "loss": 0.5513, + "step": 18435 + }, + { + "epoch": 0.76074936040274, + "grad_norm": 9.12635530752907, + "learning_rate": 4.2827040845233307e-07, + "loss": 0.5365, + "step": 18436 + }, + { + "epoch": 0.7607906247420979, + "grad_norm": 2.815275411950578, + "learning_rate": 4.281301551208074e-07, + "loss": 0.4465, + "step": 18437 + }, + { + "epoch": 0.7608318890814558, + "grad_norm": 2.9043748073606204, + "learning_rate": 4.279899209355586e-07, + "loss": 0.5522, + "step": 18438 + }, + { + "epoch": 0.7608731534208137, + "grad_norm": 4.576946910257167, + "learning_rate": 4.2784970589909154e-07, + "loss": 0.479, + "step": 18439 + }, + { + "epoch": 0.7609144177601717, + "grad_norm": 3.0657037183817697, + "learning_rate": 4.277095100139112e-07, + "loss": 0.5248, + "step": 18440 + }, + { + "epoch": 0.7609556820995296, + "grad_norm": 3.4529924262251304, + "learning_rate": 4.2756933328252114e-07, + "loss": 0.4876, + "step": 18441 + }, + { + "epoch": 0.7609969464388875, + "grad_norm": 4.601814233063654, + "learning_rate": 4.274291757074256e-07, + "loss": 0.4614, + "step": 18442 + }, + { + "epoch": 0.7610382107782454, + "grad_norm": 2.8574711572328106, + "learning_rate": 4.2728903729112857e-07, + "loss": 0.4395, + "step": 18443 + }, + { + "epoch": 0.7610794751176033, + "grad_norm": 3.9256350085952594, + "learning_rate": 4.2714891803613226e-07, + "loss": 0.4332, + "step": 18444 + }, + { + "epoch": 0.7611207394569613, + "grad_norm": 3.7419613462075567, + "learning_rate": 4.2700881794494086e-07, + "loss": 0.5217, + "step": 18445 + }, + { + "epoch": 0.7611620037963193, + "grad_norm": 2.7231307510248115, + "learning_rate": 4.2686873702005545e-07, + "loss": 0.5347, + "step": 18446 + }, + { + "epoch": 0.7612032681356772, + "grad_norm": 4.32518567163946, + "learning_rate": 4.267286752639791e-07, + "loss": 0.5063, + "step": 18447 + }, + { + "epoch": 0.7612445324750351, + "grad_norm": 2.5272028356499927, + "learning_rate": 4.26588632679214e-07, + "loss": 0.5528, + "step": 18448 + }, + { + "epoch": 0.761285796814393, + "grad_norm": 3.813557993489062, + "learning_rate": 4.2644860926826073e-07, + "loss": 0.5113, + "step": 18449 + }, + { + "epoch": 0.7613270611537509, + "grad_norm": 2.908661388483687, + "learning_rate": 4.2630860503362083e-07, + "loss": 0.5053, + "step": 18450 + }, + { + "epoch": 0.7613683254931088, + "grad_norm": 2.0544754808755123, + "learning_rate": 4.2616861997779533e-07, + "loss": 0.4618, + "step": 18451 + }, + { + "epoch": 0.7614095898324668, + "grad_norm": 4.839458114313281, + "learning_rate": 4.26028654103285e-07, + "loss": 0.4348, + "step": 18452 + }, + { + "epoch": 0.7614508541718247, + "grad_norm": 5.574879008208335, + "learning_rate": 4.2588870741258904e-07, + "loss": 0.5799, + "step": 18453 + }, + { + 
"epoch": 0.7614921185111826, + "grad_norm": 2.7394335327684622, + "learning_rate": 4.257487799082078e-07, + "loss": 0.4582, + "step": 18454 + }, + { + "epoch": 0.7615333828505405, + "grad_norm": 3.807554774765415, + "learning_rate": 4.256088715926408e-07, + "loss": 0.5175, + "step": 18455 + }, + { + "epoch": 0.7615746471898985, + "grad_norm": 8.305290445620738, + "learning_rate": 4.2546898246838743e-07, + "loss": 0.454, + "step": 18456 + }, + { + "epoch": 0.7616159115292565, + "grad_norm": 4.137718631265374, + "learning_rate": 4.2532911253794614e-07, + "loss": 0.5321, + "step": 18457 + }, + { + "epoch": 0.7616571758686144, + "grad_norm": 2.884815480857108, + "learning_rate": 4.251892618038149e-07, + "loss": 0.5043, + "step": 18458 + }, + { + "epoch": 0.7616984402079723, + "grad_norm": 2.0801089933982575, + "learning_rate": 4.250494302684922e-07, + "loss": 0.5934, + "step": 18459 + }, + { + "epoch": 0.7617397045473302, + "grad_norm": 4.30597069088011, + "learning_rate": 4.249096179344757e-07, + "loss": 0.4901, + "step": 18460 + }, + { + "epoch": 0.7617809688866881, + "grad_norm": 3.616903950886759, + "learning_rate": 4.247698248042634e-07, + "loss": 0.4484, + "step": 18461 + }, + { + "epoch": 0.761822233226046, + "grad_norm": 5.777999509903496, + "learning_rate": 4.2463005088035134e-07, + "loss": 0.5028, + "step": 18462 + }, + { + "epoch": 0.761863497565404, + "grad_norm": 3.0069330668815013, + "learning_rate": 4.2449029616523683e-07, + "loss": 0.5222, + "step": 18463 + }, + { + "epoch": 0.7619047619047619, + "grad_norm": 5.4881729044340535, + "learning_rate": 4.243505606614162e-07, + "loss": 0.561, + "step": 18464 + }, + { + "epoch": 0.7619460262441198, + "grad_norm": 4.161847911025543, + "learning_rate": 4.242108443713856e-07, + "loss": 0.5131, + "step": 18465 + }, + { + "epoch": 0.7619872905834778, + "grad_norm": 6.853441599402359, + "learning_rate": 4.240711472976403e-07, + "loss": 0.5172, + "step": 18466 + }, + { + "epoch": 0.7620285549228357, + "grad_norm": 4.1556690752756404, + "learning_rate": 4.239314694426759e-07, + "loss": 0.5511, + "step": 18467 + }, + { + "epoch": 0.7620698192621936, + "grad_norm": 3.5229493787089083, + "learning_rate": 4.237918108089877e-07, + "loss": 0.5089, + "step": 18468 + }, + { + "epoch": 0.7621110836015516, + "grad_norm": 2.3089368832809005, + "learning_rate": 4.2365217139906967e-07, + "loss": 0.5824, + "step": 18469 + }, + { + "epoch": 0.7621523479409095, + "grad_norm": 2.5667368985026053, + "learning_rate": 4.2351255121541684e-07, + "loss": 0.4819, + "step": 18470 + }, + { + "epoch": 0.7621936122802674, + "grad_norm": 9.054436453098537, + "learning_rate": 4.2337295026052237e-07, + "loss": 0.525, + "step": 18471 + }, + { + "epoch": 0.7622348766196253, + "grad_norm": 2.9247101651604663, + "learning_rate": 4.2323336853688035e-07, + "loss": 0.5101, + "step": 18472 + }, + { + "epoch": 0.7622761409589832, + "grad_norm": 3.415613316392372, + "learning_rate": 4.230938060469844e-07, + "loss": 0.565, + "step": 18473 + }, + { + "epoch": 0.7623174052983411, + "grad_norm": 4.008248955513995, + "learning_rate": 4.2295426279332673e-07, + "loss": 0.4434, + "step": 18474 + }, + { + "epoch": 0.762358669637699, + "grad_norm": 3.2271280917147114, + "learning_rate": 4.2281473877840014e-07, + "loss": 0.5715, + "step": 18475 + }, + { + "epoch": 0.7623999339770571, + "grad_norm": 3.186746539502477, + "learning_rate": 4.2267523400469706e-07, + "loss": 0.5035, + "step": 18476 + }, + { + "epoch": 0.762441198316415, + "grad_norm": 2.537551793127874, + "learning_rate": 
4.225357484747099e-07, + "loss": 0.5245, + "step": 18477 + }, + { + "epoch": 0.7624824626557729, + "grad_norm": 3.43363583138024, + "learning_rate": 4.223962821909292e-07, + "loss": 0.5149, + "step": 18478 + }, + { + "epoch": 0.7625237269951308, + "grad_norm": 2.247426644623455, + "learning_rate": 4.2225683515584663e-07, + "loss": 0.5226, + "step": 18479 + }, + { + "epoch": 0.7625649913344887, + "grad_norm": 3.359728223528922, + "learning_rate": 4.22117407371953e-07, + "loss": 0.547, + "step": 18480 + }, + { + "epoch": 0.7626062556738467, + "grad_norm": 4.963779150766213, + "learning_rate": 4.219779988417396e-07, + "loss": 0.5782, + "step": 18481 + }, + { + "epoch": 0.7626475200132046, + "grad_norm": 3.7723116112973214, + "learning_rate": 4.218386095676953e-07, + "loss": 0.5128, + "step": 18482 + }, + { + "epoch": 0.7626887843525625, + "grad_norm": 5.297744746248301, + "learning_rate": 4.21699239552311e-07, + "loss": 0.4986, + "step": 18483 + }, + { + "epoch": 0.7627300486919204, + "grad_norm": 5.484261324311731, + "learning_rate": 4.2155988879807544e-07, + "loss": 0.4838, + "step": 18484 + }, + { + "epoch": 0.7627713130312783, + "grad_norm": 2.6532237776603043, + "learning_rate": 4.2142055730747855e-07, + "loss": 0.4966, + "step": 18485 + }, + { + "epoch": 0.7628125773706363, + "grad_norm": 2.9627848499008427, + "learning_rate": 4.212812450830083e-07, + "loss": 0.5071, + "step": 18486 + }, + { + "epoch": 0.7628538417099943, + "grad_norm": 3.631560775524715, + "learning_rate": 4.211419521271535e-07, + "loss": 0.4368, + "step": 18487 + }, + { + "epoch": 0.7628951060493522, + "grad_norm": 3.076498609113591, + "learning_rate": 4.2100267844240213e-07, + "loss": 0.5099, + "step": 18488 + }, + { + "epoch": 0.7629363703887101, + "grad_norm": 2.566604674320519, + "learning_rate": 4.2086342403124287e-07, + "loss": 0.4815, + "step": 18489 + }, + { + "epoch": 0.762977634728068, + "grad_norm": 2.8764019225347015, + "learning_rate": 4.207241888961619e-07, + "loss": 0.4714, + "step": 18490 + }, + { + "epoch": 0.7630188990674259, + "grad_norm": 3.918253965550056, + "learning_rate": 4.2058497303964677e-07, + "loss": 0.5369, + "step": 18491 + }, + { + "epoch": 0.7630601634067838, + "grad_norm": 3.1684014396617033, + "learning_rate": 4.204457764641844e-07, + "loss": 0.4812, + "step": 18492 + }, + { + "epoch": 0.7631014277461418, + "grad_norm": 6.322300045444291, + "learning_rate": 4.2030659917226165e-07, + "loss": 0.4929, + "step": 18493 + }, + { + "epoch": 0.7631426920854997, + "grad_norm": 1.8473718023268324, + "learning_rate": 4.201674411663634e-07, + "loss": 0.5031, + "step": 18494 + }, + { + "epoch": 0.7631839564248576, + "grad_norm": 6.253925965015927, + "learning_rate": 4.2002830244897597e-07, + "loss": 0.4937, + "step": 18495 + }, + { + "epoch": 0.7632252207642156, + "grad_norm": 3.3298899094596317, + "learning_rate": 4.1988918302258527e-07, + "loss": 0.5332, + "step": 18496 + }, + { + "epoch": 0.7632664851035735, + "grad_norm": 3.097393653161938, + "learning_rate": 4.1975008288967535e-07, + "loss": 0.5389, + "step": 18497 + }, + { + "epoch": 0.7633077494429314, + "grad_norm": 9.104960702974049, + "learning_rate": 4.1961100205273163e-07, + "loss": 0.4684, + "step": 18498 + }, + { + "epoch": 0.7633490137822894, + "grad_norm": 2.8870035827473, + "learning_rate": 4.1947194051423786e-07, + "loss": 0.5313, + "step": 18499 + }, + { + "epoch": 0.7633902781216473, + "grad_norm": 2.279511217784065, + "learning_rate": 4.1933289827667814e-07, + "loss": 0.504, + "step": 18500 + }, + { + "epoch": 
0.7634315424610052, + "grad_norm": 2.7259914567684667, + "learning_rate": 4.1919387534253643e-07, + "loss": 0.4651, + "step": 18501 + }, + { + "epoch": 0.7634728068003631, + "grad_norm": 3.432252072886878, + "learning_rate": 4.190548717142963e-07, + "loss": 0.5129, + "step": 18502 + }, + { + "epoch": 0.763514071139721, + "grad_norm": 3.261386661883201, + "learning_rate": 4.189158873944397e-07, + "loss": 0.5153, + "step": 18503 + }, + { + "epoch": 0.763555335479079, + "grad_norm": 4.034745094804528, + "learning_rate": 4.1877692238544986e-07, + "loss": 0.5294, + "step": 18504 + }, + { + "epoch": 0.7635965998184369, + "grad_norm": 4.456324263037312, + "learning_rate": 4.186379766898094e-07, + "loss": 0.5046, + "step": 18505 + }, + { + "epoch": 0.7636378641577949, + "grad_norm": 2.358623970184184, + "learning_rate": 4.184990503099994e-07, + "loss": 0.4688, + "step": 18506 + }, + { + "epoch": 0.7636791284971528, + "grad_norm": 3.5651125702864306, + "learning_rate": 4.1836014324850203e-07, + "loss": 0.4932, + "step": 18507 + }, + { + "epoch": 0.7637203928365107, + "grad_norm": 2.686781047751352, + "learning_rate": 4.182212555077981e-07, + "loss": 0.5239, + "step": 18508 + }, + { + "epoch": 0.7637616571758686, + "grad_norm": 5.216920882958235, + "learning_rate": 4.1808238709036934e-07, + "loss": 0.5048, + "step": 18509 + }, + { + "epoch": 0.7638029215152266, + "grad_norm": 6.061101223323794, + "learning_rate": 4.1794353799869575e-07, + "loss": 0.4902, + "step": 18510 + }, + { + "epoch": 0.7638441858545845, + "grad_norm": 2.7760078304394575, + "learning_rate": 4.178047082352568e-07, + "loss": 0.4747, + "step": 18511 + }, + { + "epoch": 0.7638854501939424, + "grad_norm": 3.005962252004998, + "learning_rate": 4.176658978025332e-07, + "loss": 0.5118, + "step": 18512 + }, + { + "epoch": 0.7639267145333003, + "grad_norm": 2.4445813158319405, + "learning_rate": 4.175271067030042e-07, + "loss": 0.5033, + "step": 18513 + }, + { + "epoch": 0.7639679788726582, + "grad_norm": 8.723771936518864, + "learning_rate": 4.173883349391494e-07, + "loss": 0.5406, + "step": 18514 + }, + { + "epoch": 0.7640092432120161, + "grad_norm": 3.510979262379961, + "learning_rate": 4.172495825134467e-07, + "loss": 0.5235, + "step": 18515 + }, + { + "epoch": 0.764050507551374, + "grad_norm": 2.4885159623611064, + "learning_rate": 4.1711084942837523e-07, + "loss": 0.4639, + "step": 18516 + }, + { + "epoch": 0.7640917718907321, + "grad_norm": 5.479359336200382, + "learning_rate": 4.169721356864129e-07, + "loss": 0.5184, + "step": 18517 + }, + { + "epoch": 0.76413303623009, + "grad_norm": 7.934787931010247, + "learning_rate": 4.1683344129003805e-07, + "loss": 0.5004, + "step": 18518 + }, + { + "epoch": 0.7641743005694479, + "grad_norm": 2.4905236032089695, + "learning_rate": 4.1669476624172715e-07, + "loss": 0.5208, + "step": 18519 + }, + { + "epoch": 0.7642155649088058, + "grad_norm": 6.238968129567303, + "learning_rate": 4.165561105439576e-07, + "loss": 0.4844, + "step": 18520 + }, + { + "epoch": 0.7642568292481637, + "grad_norm": 5.4485403677924165, + "learning_rate": 4.164174741992064e-07, + "loss": 0.5639, + "step": 18521 + }, + { + "epoch": 0.7642980935875217, + "grad_norm": 3.6557955260966466, + "learning_rate": 4.1627885720995025e-07, + "loss": 0.4948, + "step": 18522 + }, + { + "epoch": 0.7643393579268796, + "grad_norm": 3.495122560067566, + "learning_rate": 4.161402595786648e-07, + "loss": 0.5653, + "step": 18523 + }, + { + "epoch": 0.7643806222662375, + "grad_norm": 2.292580761967236, + "learning_rate": 
4.160016813078253e-07, + "loss": 0.481, + "step": 18524 + }, + { + "epoch": 0.7644218866055954, + "grad_norm": 3.491122288716308, + "learning_rate": 4.1586312239990735e-07, + "loss": 0.5325, + "step": 18525 + }, + { + "epoch": 0.7644631509449533, + "grad_norm": 7.31207576925441, + "learning_rate": 4.157245828573867e-07, + "loss": 0.5241, + "step": 18526 + }, + { + "epoch": 0.7645044152843113, + "grad_norm": 3.2518324252216284, + "learning_rate": 4.155860626827369e-07, + "loss": 0.559, + "step": 18527 + }, + { + "epoch": 0.7645456796236693, + "grad_norm": 3.6636122950978476, + "learning_rate": 4.154475618784329e-07, + "loss": 0.5288, + "step": 18528 + }, + { + "epoch": 0.7645869439630272, + "grad_norm": 13.912066137916488, + "learning_rate": 4.153090804469486e-07, + "loss": 0.5614, + "step": 18529 + }, + { + "epoch": 0.7646282083023851, + "grad_norm": 5.269872299489212, + "learning_rate": 4.15170618390758e-07, + "loss": 0.5436, + "step": 18530 + }, + { + "epoch": 0.764669472641743, + "grad_norm": 2.316676368230026, + "learning_rate": 4.150321757123336e-07, + "loss": 0.4308, + "step": 18531 + }, + { + "epoch": 0.7647107369811009, + "grad_norm": 2.215873461227487, + "learning_rate": 4.1489375241414864e-07, + "loss": 0.4725, + "step": 18532 + }, + { + "epoch": 0.7647520013204588, + "grad_norm": 2.1547062698898345, + "learning_rate": 4.147553484986757e-07, + "loss": 0.4981, + "step": 18533 + }, + { + "epoch": 0.7647932656598168, + "grad_norm": 3.116222865900148, + "learning_rate": 4.146169639683878e-07, + "loss": 0.5065, + "step": 18534 + }, + { + "epoch": 0.7648345299991747, + "grad_norm": 4.631607471958517, + "learning_rate": 4.144785988257559e-07, + "loss": 0.5303, + "step": 18535 + }, + { + "epoch": 0.7648757943385326, + "grad_norm": 3.1736089164584214, + "learning_rate": 4.1434025307325147e-07, + "loss": 0.5085, + "step": 18536 + }, + { + "epoch": 0.7649170586778906, + "grad_norm": 4.049318052715306, + "learning_rate": 4.1420192671334604e-07, + "loss": 0.545, + "step": 18537 + }, + { + "epoch": 0.7649583230172485, + "grad_norm": 2.6121627867164463, + "learning_rate": 4.1406361974851035e-07, + "loss": 0.5442, + "step": 18538 + }, + { + "epoch": 0.7649995873566064, + "grad_norm": 2.4481499863946232, + "learning_rate": 4.139253321812154e-07, + "loss": 0.4898, + "step": 18539 + }, + { + "epoch": 0.7650408516959644, + "grad_norm": 4.304830493287939, + "learning_rate": 4.137870640139307e-07, + "loss": 0.4806, + "step": 18540 + }, + { + "epoch": 0.7650821160353223, + "grad_norm": 4.995014786204577, + "learning_rate": 4.136488152491262e-07, + "loss": 0.5372, + "step": 18541 + }, + { + "epoch": 0.7651233803746802, + "grad_norm": 6.496755158089825, + "learning_rate": 4.135105858892716e-07, + "loss": 0.53, + "step": 18542 + }, + { + "epoch": 0.7651646447140381, + "grad_norm": 1.7300383652150169, + "learning_rate": 4.133723759368362e-07, + "loss": 0.5046, + "step": 18543 + }, + { + "epoch": 0.765205909053396, + "grad_norm": 3.0259977801361253, + "learning_rate": 4.1323418539428816e-07, + "loss": 0.4788, + "step": 18544 + }, + { + "epoch": 0.7652471733927539, + "grad_norm": 15.485701801514425, + "learning_rate": 4.1309601426409614e-07, + "loss": 0.5366, + "step": 18545 + }, + { + "epoch": 0.7652884377321119, + "grad_norm": 11.846762845339718, + "learning_rate": 4.129578625487289e-07, + "loss": 0.4822, + "step": 18546 + }, + { + "epoch": 0.7653297020714699, + "grad_norm": 2.294699151479267, + "learning_rate": 4.1281973025065313e-07, + "loss": 0.5022, + "step": 18547 + }, + { + "epoch": 
0.7653709664108278, + "grad_norm": 3.8444290910612957, + "learning_rate": 4.1268161737233663e-07, + "loss": 0.483, + "step": 18548 + }, + { + "epoch": 0.7654122307501857, + "grad_norm": 3.286902426268634, + "learning_rate": 4.1254352391624696e-07, + "loss": 0.4886, + "step": 18549 + }, + { + "epoch": 0.7654534950895436, + "grad_norm": 4.787165887092285, + "learning_rate": 4.1240544988485e-07, + "loss": 0.5827, + "step": 18550 + }, + { + "epoch": 0.7654947594289016, + "grad_norm": 2.559427658344855, + "learning_rate": 4.122673952806127e-07, + "loss": 0.4764, + "step": 18551 + }, + { + "epoch": 0.7655360237682595, + "grad_norm": 5.969760200026401, + "learning_rate": 4.121293601060005e-07, + "loss": 0.4943, + "step": 18552 + }, + { + "epoch": 0.7655772881076174, + "grad_norm": 2.3448724238136665, + "learning_rate": 4.119913443634794e-07, + "loss": 0.5049, + "step": 18553 + }, + { + "epoch": 0.7656185524469753, + "grad_norm": 2.7331485894956433, + "learning_rate": 4.118533480555146e-07, + "loss": 0.5498, + "step": 18554 + }, + { + "epoch": 0.7656598167863332, + "grad_norm": 2.383830911044227, + "learning_rate": 4.1171537118457147e-07, + "loss": 0.5323, + "step": 18555 + }, + { + "epoch": 0.7657010811256911, + "grad_norm": 2.148196112457346, + "learning_rate": 4.11577413753114e-07, + "loss": 0.5126, + "step": 18556 + }, + { + "epoch": 0.7657423454650492, + "grad_norm": 4.019045985328638, + "learning_rate": 4.114394757636068e-07, + "loss": 0.4619, + "step": 18557 + }, + { + "epoch": 0.7657836098044071, + "grad_norm": 2.288495522953474, + "learning_rate": 4.113015572185135e-07, + "loss": 0.4685, + "step": 18558 + }, + { + "epoch": 0.765824874143765, + "grad_norm": 2.194776492419653, + "learning_rate": 4.1116365812029844e-07, + "loss": 0.511, + "step": 18559 + }, + { + "epoch": 0.7658661384831229, + "grad_norm": 4.525394302888882, + "learning_rate": 4.1102577847142384e-07, + "loss": 0.5156, + "step": 18560 + }, + { + "epoch": 0.7659074028224808, + "grad_norm": 3.105029599676618, + "learning_rate": 4.1088791827435314e-07, + "loss": 0.557, + "step": 18561 + }, + { + "epoch": 0.7659486671618387, + "grad_norm": 2.751606724731356, + "learning_rate": 4.107500775315491e-07, + "loss": 0.4992, + "step": 18562 + }, + { + "epoch": 0.7659899315011967, + "grad_norm": 3.2795790181619866, + "learning_rate": 4.106122562454736e-07, + "loss": 0.4885, + "step": 18563 + }, + { + "epoch": 0.7660311958405546, + "grad_norm": 1.9187273382543066, + "learning_rate": 4.1047445441858786e-07, + "loss": 0.4997, + "step": 18564 + }, + { + "epoch": 0.7660724601799125, + "grad_norm": 4.438303240058525, + "learning_rate": 4.1033667205335413e-07, + "loss": 0.5285, + "step": 18565 + }, + { + "epoch": 0.7661137245192704, + "grad_norm": 3.6411910181952396, + "learning_rate": 4.1019890915223333e-07, + "loss": 0.539, + "step": 18566 + }, + { + "epoch": 0.7661549888586284, + "grad_norm": 7.9638823543089865, + "learning_rate": 4.1006116571768657e-07, + "loss": 0.5161, + "step": 18567 + }, + { + "epoch": 0.7661962531979863, + "grad_norm": 11.220915933706058, + "learning_rate": 4.099234417521737e-07, + "loss": 0.504, + "step": 18568 + }, + { + "epoch": 0.7662375175373443, + "grad_norm": 2.602422255175391, + "learning_rate": 4.097857372581551e-07, + "loss": 0.5006, + "step": 18569 + }, + { + "epoch": 0.7662787818767022, + "grad_norm": 3.227956419406871, + "learning_rate": 4.096480522380904e-07, + "loss": 0.4736, + "step": 18570 + }, + { + "epoch": 0.7663200462160601, + "grad_norm": 3.294673062567442, + "learning_rate": 
4.0951038669443974e-07, + "loss": 0.4876, + "step": 18571 + }, + { + "epoch": 0.766361310555418, + "grad_norm": 4.764616080526938, + "learning_rate": 4.0937274062966106e-07, + "loss": 0.4721, + "step": 18572 + }, + { + "epoch": 0.7664025748947759, + "grad_norm": 3.3528768252334156, + "learning_rate": 4.0923511404621363e-07, + "loss": 0.4999, + "step": 18573 + }, + { + "epoch": 0.7664438392341338, + "grad_norm": 2.895987380744507, + "learning_rate": 4.0909750694655554e-07, + "loss": 0.4978, + "step": 18574 + }, + { + "epoch": 0.7664851035734918, + "grad_norm": 1.9349753614290193, + "learning_rate": 4.0895991933314563e-07, + "loss": 0.463, + "step": 18575 + }, + { + "epoch": 0.7665263679128497, + "grad_norm": 5.890024308233857, + "learning_rate": 4.0882235120844087e-07, + "loss": 0.5604, + "step": 18576 + }, + { + "epoch": 0.7665676322522076, + "grad_norm": 2.451108509686973, + "learning_rate": 4.086848025748981e-07, + "loss": 0.5383, + "step": 18577 + }, + { + "epoch": 0.7666088965915656, + "grad_norm": 2.25517776986029, + "learning_rate": 4.0854727343497487e-07, + "loss": 0.5084, + "step": 18578 + }, + { + "epoch": 0.7666501609309235, + "grad_norm": 1.9749767960128648, + "learning_rate": 4.0840976379112774e-07, + "loss": 0.5295, + "step": 18579 + }, + { + "epoch": 0.7666914252702814, + "grad_norm": 26.61862261535827, + "learning_rate": 4.082722736458134e-07, + "loss": 0.5137, + "step": 18580 + }, + { + "epoch": 0.7667326896096394, + "grad_norm": 3.0667811465760475, + "learning_rate": 4.081348030014869e-07, + "loss": 0.5158, + "step": 18581 + }, + { + "epoch": 0.7667739539489973, + "grad_norm": 6.581759365418816, + "learning_rate": 4.079973518606043e-07, + "loss": 0.4873, + "step": 18582 + }, + { + "epoch": 0.7668152182883552, + "grad_norm": 2.5391013868544703, + "learning_rate": 4.078599202256212e-07, + "loss": 0.5658, + "step": 18583 + }, + { + "epoch": 0.7668564826277131, + "grad_norm": 4.26999163864334, + "learning_rate": 4.077225080989915e-07, + "loss": 0.5337, + "step": 18584 + }, + { + "epoch": 0.766897746967071, + "grad_norm": 3.6647754584383585, + "learning_rate": 4.0758511548317033e-07, + "loss": 0.4864, + "step": 18585 + }, + { + "epoch": 0.7669390113064289, + "grad_norm": 9.845421382127178, + "learning_rate": 4.0744774238061185e-07, + "loss": 0.5567, + "step": 18586 + }, + { + "epoch": 0.7669802756457869, + "grad_norm": 1.9830427414812881, + "learning_rate": 4.073103887937702e-07, + "loss": 0.5117, + "step": 18587 + }, + { + "epoch": 0.7670215399851449, + "grad_norm": 40.183345377718595, + "learning_rate": 4.071730547250986e-07, + "loss": 0.5582, + "step": 18588 + }, + { + "epoch": 0.7670628043245028, + "grad_norm": 4.105753561080398, + "learning_rate": 4.0703574017704956e-07, + "loss": 0.4695, + "step": 18589 + }, + { + "epoch": 0.7671040686638607, + "grad_norm": 2.299993286709529, + "learning_rate": 4.068984451520764e-07, + "loss": 0.4902, + "step": 18590 + }, + { + "epoch": 0.7671453330032186, + "grad_norm": 2.5485470555926453, + "learning_rate": 4.067611696526315e-07, + "loss": 0.5066, + "step": 18591 + }, + { + "epoch": 0.7671865973425765, + "grad_norm": 2.147968218829675, + "learning_rate": 4.0662391368116746e-07, + "loss": 0.4779, + "step": 18592 + }, + { + "epoch": 0.7672278616819345, + "grad_norm": 3.507985545952338, + "learning_rate": 4.0648667724013506e-07, + "loss": 0.5489, + "step": 18593 + }, + { + "epoch": 0.7672691260212924, + "grad_norm": 2.9266023004566595, + "learning_rate": 4.0634946033198613e-07, + "loss": 0.5115, + "step": 18594 + }, + { + "epoch": 
0.7673103903606503, + "grad_norm": 2.434609343850349, + "learning_rate": 4.062122629591717e-07, + "loss": 0.5148, + "step": 18595 + }, + { + "epoch": 0.7673516547000082, + "grad_norm": 9.038795498606197, + "learning_rate": 4.06075085124143e-07, + "loss": 0.4824, + "step": 18596 + }, + { + "epoch": 0.7673929190393661, + "grad_norm": 4.182367017864281, + "learning_rate": 4.059379268293494e-07, + "loss": 0.5464, + "step": 18597 + }, + { + "epoch": 0.7674341833787242, + "grad_norm": 3.2943040695211026, + "learning_rate": 4.058007880772414e-07, + "loss": 0.5122, + "step": 18598 + }, + { + "epoch": 0.7674754477180821, + "grad_norm": 9.364553875750802, + "learning_rate": 4.0566366887026845e-07, + "loss": 0.4886, + "step": 18599 + }, + { + "epoch": 0.76751671205744, + "grad_norm": 6.992076036519248, + "learning_rate": 4.0552656921088055e-07, + "loss": 0.5149, + "step": 18600 + }, + { + "epoch": 0.7675579763967979, + "grad_norm": 3.6917327078315294, + "learning_rate": 4.0538948910152606e-07, + "loss": 0.5396, + "step": 18601 + }, + { + "epoch": 0.7675992407361558, + "grad_norm": 3.222495483933934, + "learning_rate": 4.052524285446532e-07, + "loss": 0.5328, + "step": 18602 + }, + { + "epoch": 0.7676405050755137, + "grad_norm": 5.550167972789869, + "learning_rate": 4.0511538754271054e-07, + "loss": 0.5244, + "step": 18603 + }, + { + "epoch": 0.7676817694148717, + "grad_norm": 2.6722206639740946, + "learning_rate": 4.049783660981464e-07, + "loss": 0.5353, + "step": 18604 + }, + { + "epoch": 0.7677230337542296, + "grad_norm": 3.391089079731419, + "learning_rate": 4.048413642134077e-07, + "loss": 0.5401, + "step": 18605 + }, + { + "epoch": 0.7677642980935875, + "grad_norm": 2.885519666664307, + "learning_rate": 4.0470438189094184e-07, + "loss": 0.4915, + "step": 18606 + }, + { + "epoch": 0.7678055624329454, + "grad_norm": 2.5810669404990616, + "learning_rate": 4.045674191331956e-07, + "loss": 0.4858, + "step": 18607 + }, + { + "epoch": 0.7678468267723034, + "grad_norm": 5.632554423348652, + "learning_rate": 4.044304759426163e-07, + "loss": 0.5183, + "step": 18608 + }, + { + "epoch": 0.7678880911116613, + "grad_norm": 3.750844636022593, + "learning_rate": 4.0429355232164874e-07, + "loss": 0.5514, + "step": 18609 + }, + { + "epoch": 0.7679293554510193, + "grad_norm": 2.409799364250237, + "learning_rate": 4.041566482727395e-07, + "loss": 0.4931, + "step": 18610 + }, + { + "epoch": 0.7679706197903772, + "grad_norm": 2.2860132837391363, + "learning_rate": 4.040197637983337e-07, + "loss": 0.5223, + "step": 18611 + }, + { + "epoch": 0.7680118841297351, + "grad_norm": 4.384412709537349, + "learning_rate": 4.038828989008772e-07, + "loss": 0.5101, + "step": 18612 + }, + { + "epoch": 0.768053148469093, + "grad_norm": 2.4297201622845117, + "learning_rate": 4.037460535828138e-07, + "loss": 0.5311, + "step": 18613 + }, + { + "epoch": 0.7680944128084509, + "grad_norm": 3.456648479868459, + "learning_rate": 4.0360922784658873e-07, + "loss": 0.5487, + "step": 18614 + }, + { + "epoch": 0.7681356771478088, + "grad_norm": 2.903628038095319, + "learning_rate": 4.0347242169464515e-07, + "loss": 0.4721, + "step": 18615 + }, + { + "epoch": 0.7681769414871668, + "grad_norm": 3.6912272630568435, + "learning_rate": 4.0333563512942717e-07, + "loss": 0.5049, + "step": 18616 + }, + { + "epoch": 0.7682182058265247, + "grad_norm": 2.5089246880099507, + "learning_rate": 4.0319886815337864e-07, + "loss": 0.5599, + "step": 18617 + }, + { + "epoch": 0.7682594701658827, + "grad_norm": 4.181809298069872, + "learning_rate": 
4.030621207689418e-07, + "loss": 0.4396, + "step": 18618 + }, + { + "epoch": 0.7683007345052406, + "grad_norm": 3.6095445368937913, + "learning_rate": 4.0292539297855946e-07, + "loss": 0.568, + "step": 18619 + }, + { + "epoch": 0.7683419988445985, + "grad_norm": 4.905702461805103, + "learning_rate": 4.027886847846741e-07, + "loss": 0.54, + "step": 18620 + }, + { + "epoch": 0.7683832631839564, + "grad_norm": 2.1209183401533442, + "learning_rate": 4.02651996189728e-07, + "loss": 0.5058, + "step": 18621 + }, + { + "epoch": 0.7684245275233144, + "grad_norm": 40.466098992247794, + "learning_rate": 4.025153271961619e-07, + "loss": 0.5413, + "step": 18622 + }, + { + "epoch": 0.7684657918626723, + "grad_norm": 3.2165923569128547, + "learning_rate": 4.023786778064177e-07, + "loss": 0.5209, + "step": 18623 + }, + { + "epoch": 0.7685070562020302, + "grad_norm": 2.380841898091731, + "learning_rate": 4.0224204802293654e-07, + "loss": 0.4757, + "step": 18624 + }, + { + "epoch": 0.7685483205413881, + "grad_norm": 2.7834927868664407, + "learning_rate": 4.0210543784815813e-07, + "loss": 0.4749, + "step": 18625 + }, + { + "epoch": 0.768589584880746, + "grad_norm": 3.206417644554645, + "learning_rate": 4.019688472845229e-07, + "loss": 0.4796, + "step": 18626 + }, + { + "epoch": 0.7686308492201039, + "grad_norm": 3.5808709529827434, + "learning_rate": 4.0183227633447117e-07, + "loss": 0.5291, + "step": 18627 + }, + { + "epoch": 0.768672113559462, + "grad_norm": 3.6549841050962217, + "learning_rate": 4.0169572500044253e-07, + "loss": 0.5337, + "step": 18628 + }, + { + "epoch": 0.7687133778988199, + "grad_norm": 4.576069867172696, + "learning_rate": 4.0155919328487596e-07, + "loss": 0.5152, + "step": 18629 + }, + { + "epoch": 0.7687546422381778, + "grad_norm": 3.489677759886907, + "learning_rate": 4.0142268119020944e-07, + "loss": 0.5521, + "step": 18630 + }, + { + "epoch": 0.7687959065775357, + "grad_norm": 2.7303085964490776, + "learning_rate": 4.01286188718882e-07, + "loss": 0.5246, + "step": 18631 + }, + { + "epoch": 0.7688371709168936, + "grad_norm": 21.190979222988073, + "learning_rate": 4.0114971587333193e-07, + "loss": 0.4911, + "step": 18632 + }, + { + "epoch": 0.7688784352562515, + "grad_norm": 10.59784034538125, + "learning_rate": 4.0101326265599726e-07, + "loss": 0.505, + "step": 18633 + }, + { + "epoch": 0.7689196995956095, + "grad_norm": 5.206455292998752, + "learning_rate": 4.008768290693145e-07, + "loss": 0.5291, + "step": 18634 + }, + { + "epoch": 0.7689609639349674, + "grad_norm": 2.9340886932695063, + "learning_rate": 4.0074041511572106e-07, + "loss": 0.507, + "step": 18635 + }, + { + "epoch": 0.7690022282743253, + "grad_norm": 5.899977271383612, + "learning_rate": 4.0060402079765384e-07, + "loss": 0.4686, + "step": 18636 + }, + { + "epoch": 0.7690434926136832, + "grad_norm": 4.82473686990899, + "learning_rate": 4.0046764611754956e-07, + "loss": 0.5444, + "step": 18637 + }, + { + "epoch": 0.7690847569530411, + "grad_norm": 2.2365702275691506, + "learning_rate": 4.0033129107784324e-07, + "loss": 0.5295, + "step": 18638 + }, + { + "epoch": 0.7691260212923992, + "grad_norm": 2.9900462402924357, + "learning_rate": 4.0019495568097093e-07, + "loss": 0.5236, + "step": 18639 + }, + { + "epoch": 0.7691672856317571, + "grad_norm": 2.639163553683514, + "learning_rate": 4.0005863992936803e-07, + "loss": 0.4646, + "step": 18640 + }, + { + "epoch": 0.769208549971115, + "grad_norm": 6.537376403494331, + "learning_rate": 3.999223438254704e-07, + "loss": 0.5076, + "step": 18641 + }, + { + "epoch": 
0.7692498143104729, + "grad_norm": 2.1979952389397734, + "learning_rate": 3.9978606737171086e-07, + "loss": 0.525, + "step": 18642 + }, + { + "epoch": 0.7692910786498308, + "grad_norm": 4.0303831900139215, + "learning_rate": 3.9964981057052434e-07, + "loss": 0.4679, + "step": 18643 + }, + { + "epoch": 0.7693323429891887, + "grad_norm": 2.79410161651509, + "learning_rate": 3.995135734243448e-07, + "loss": 0.4973, + "step": 18644 + }, + { + "epoch": 0.7693736073285466, + "grad_norm": 1.983570106013226, + "learning_rate": 3.9937735593560645e-07, + "loss": 0.5362, + "step": 18645 + }, + { + "epoch": 0.7694148716679046, + "grad_norm": 2.4945318061940895, + "learning_rate": 3.992411581067413e-07, + "loss": 0.5432, + "step": 18646 + }, + { + "epoch": 0.7694561360072625, + "grad_norm": 8.19550639127272, + "learning_rate": 3.991049799401827e-07, + "loss": 0.5189, + "step": 18647 + }, + { + "epoch": 0.7694974003466204, + "grad_norm": 3.5026695896896, + "learning_rate": 3.9896882143836336e-07, + "loss": 0.5143, + "step": 18648 + }, + { + "epoch": 0.7695386646859784, + "grad_norm": 33.82921796808086, + "learning_rate": 3.988326826037156e-07, + "loss": 0.5018, + "step": 18649 + }, + { + "epoch": 0.7695799290253363, + "grad_norm": 4.514152113026679, + "learning_rate": 3.986965634386704e-07, + "loss": 0.5123, + "step": 18650 + }, + { + "epoch": 0.7696211933646943, + "grad_norm": 3.4086640540711803, + "learning_rate": 3.9856046394565953e-07, + "loss": 0.4877, + "step": 18651 + }, + { + "epoch": 0.7696624577040522, + "grad_norm": 3.8224497973803313, + "learning_rate": 3.9842438412711425e-07, + "loss": 0.5229, + "step": 18652 + }, + { + "epoch": 0.7697037220434101, + "grad_norm": 21.096679458078356, + "learning_rate": 3.9828832398546556e-07, + "loss": 0.5536, + "step": 18653 + }, + { + "epoch": 0.769744986382768, + "grad_norm": 1.749464748591623, + "learning_rate": 3.9815228352314335e-07, + "loss": 0.4701, + "step": 18654 + }, + { + "epoch": 0.7697862507221259, + "grad_norm": 3.1726234084254226, + "learning_rate": 3.980162627425776e-07, + "loss": 0.4546, + "step": 18655 + }, + { + "epoch": 0.7698275150614838, + "grad_norm": 2.502528219690905, + "learning_rate": 3.978802616461979e-07, + "loss": 0.5261, + "step": 18656 + }, + { + "epoch": 0.7698687794008418, + "grad_norm": 4.776174874390302, + "learning_rate": 3.9774428023643387e-07, + "loss": 0.5639, + "step": 18657 + }, + { + "epoch": 0.7699100437401997, + "grad_norm": 3.840285099094318, + "learning_rate": 3.9760831851571496e-07, + "loss": 0.51, + "step": 18658 + }, + { + "epoch": 0.7699513080795577, + "grad_norm": 4.550484088882994, + "learning_rate": 3.974723764864686e-07, + "loss": 0.5556, + "step": 18659 + }, + { + "epoch": 0.7699925724189156, + "grad_norm": 2.7343708267929654, + "learning_rate": 3.9733645415112395e-07, + "loss": 0.5155, + "step": 18660 + }, + { + "epoch": 0.7700338367582735, + "grad_norm": 2.611072491131863, + "learning_rate": 3.972005515121089e-07, + "loss": 0.4933, + "step": 18661 + }, + { + "epoch": 0.7700751010976314, + "grad_norm": 19.26335963256468, + "learning_rate": 3.970646685718504e-07, + "loss": 0.5465, + "step": 18662 + }, + { + "epoch": 0.7701163654369894, + "grad_norm": 3.6875731922809636, + "learning_rate": 3.9692880533277616e-07, + "loss": 0.499, + "step": 18663 + }, + { + "epoch": 0.7701576297763473, + "grad_norm": 3.0395470395194555, + "learning_rate": 3.967929617973128e-07, + "loss": 0.482, + "step": 18664 + }, + { + "epoch": 0.7701988941157052, + "grad_norm": 2.5289758426638387, + "learning_rate": 
3.966571379678874e-07, + "loss": 0.5051, + "step": 18665 + }, + { + "epoch": 0.7702401584550631, + "grad_norm": 2.7611306307397494, + "learning_rate": 3.965213338469252e-07, + "loss": 0.5261, + "step": 18666 + }, + { + "epoch": 0.770281422794421, + "grad_norm": 3.9210243405987524, + "learning_rate": 3.963855494368529e-07, + "loss": 0.5224, + "step": 18667 + }, + { + "epoch": 0.7703226871337789, + "grad_norm": 4.505302660216185, + "learning_rate": 3.962497847400951e-07, + "loss": 0.5023, + "step": 18668 + }, + { + "epoch": 0.770363951473137, + "grad_norm": 2.2675161813993423, + "learning_rate": 3.961140397590773e-07, + "loss": 0.4632, + "step": 18669 + }, + { + "epoch": 0.7704052158124949, + "grad_norm": 2.6653294883965613, + "learning_rate": 3.9597831449622467e-07, + "loss": 0.4764, + "step": 18670 + }, + { + "epoch": 0.7704464801518528, + "grad_norm": 2.893257678871122, + "learning_rate": 3.9584260895396086e-07, + "loss": 0.494, + "step": 18671 + }, + { + "epoch": 0.7704877444912107, + "grad_norm": 3.682157078599509, + "learning_rate": 3.9570692313471005e-07, + "loss": 0.5021, + "step": 18672 + }, + { + "epoch": 0.7705290088305686, + "grad_norm": 4.4914723533597645, + "learning_rate": 3.9557125704089623e-07, + "loss": 0.5194, + "step": 18673 + }, + { + "epoch": 0.7705702731699265, + "grad_norm": 3.428743396624562, + "learning_rate": 3.954356106749429e-07, + "loss": 0.5376, + "step": 18674 + }, + { + "epoch": 0.7706115375092845, + "grad_norm": 2.062257425290585, + "learning_rate": 3.952999840392725e-07, + "loss": 0.545, + "step": 18675 + }, + { + "epoch": 0.7706528018486424, + "grad_norm": 2.3080767286787087, + "learning_rate": 3.951643771363079e-07, + "loss": 0.5755, + "step": 18676 + }, + { + "epoch": 0.7706940661880003, + "grad_norm": 2.7557987717966985, + "learning_rate": 3.950287899684714e-07, + "loss": 0.5081, + "step": 18677 + }, + { + "epoch": 0.7707353305273582, + "grad_norm": 2.5084449564345332, + "learning_rate": 3.948932225381852e-07, + "loss": 0.503, + "step": 18678 + }, + { + "epoch": 0.7707765948667162, + "grad_norm": 3.8155293018270715, + "learning_rate": 3.947576748478704e-07, + "loss": 0.4864, + "step": 18679 + }, + { + "epoch": 0.7708178592060742, + "grad_norm": 10.477854940344539, + "learning_rate": 3.9462214689994866e-07, + "loss": 0.539, + "step": 18680 + }, + { + "epoch": 0.7708591235454321, + "grad_norm": 2.1053829234936847, + "learning_rate": 3.944866386968403e-07, + "loss": 0.4676, + "step": 18681 + }, + { + "epoch": 0.77090038788479, + "grad_norm": 23.384992740029247, + "learning_rate": 3.943511502409665e-07, + "loss": 0.5339, + "step": 18682 + }, + { + "epoch": 0.7709416522241479, + "grad_norm": 4.590165650415172, + "learning_rate": 3.942156815347466e-07, + "loss": 0.494, + "step": 18683 + }, + { + "epoch": 0.7709829165635058, + "grad_norm": 9.388605230960538, + "learning_rate": 3.9408023258060105e-07, + "loss": 0.5003, + "step": 18684 + }, + { + "epoch": 0.7710241809028637, + "grad_norm": 4.077089827490649, + "learning_rate": 3.93944803380949e-07, + "loss": 0.4934, + "step": 18685 + }, + { + "epoch": 0.7710654452422216, + "grad_norm": 5.997081207922831, + "learning_rate": 3.938093939382101e-07, + "loss": 0.5722, + "step": 18686 + }, + { + "epoch": 0.7711067095815796, + "grad_norm": 4.272164055294268, + "learning_rate": 3.936740042548023e-07, + "loss": 0.5358, + "step": 18687 + }, + { + "epoch": 0.7711479739209375, + "grad_norm": 2.312748000402155, + "learning_rate": 3.9353863433314445e-07, + "loss": 0.5273, + "step": 18688 + }, + { + "epoch": 
0.7711892382602955, + "grad_norm": 2.3768236791300996, + "learning_rate": 3.9340328417565443e-07, + "loss": 0.5362, + "step": 18689 + }, + { + "epoch": 0.7712305025996534, + "grad_norm": 5.459040496929917, + "learning_rate": 3.932679537847506e-07, + "loss": 0.5022, + "step": 18690 + }, + { + "epoch": 0.7712717669390113, + "grad_norm": 19.675448763214263, + "learning_rate": 3.9313264316284916e-07, + "loss": 0.4891, + "step": 18691 + }, + { + "epoch": 0.7713130312783693, + "grad_norm": 3.2102889974369067, + "learning_rate": 3.929973523123677e-07, + "loss": 0.4991, + "step": 18692 + }, + { + "epoch": 0.7713542956177272, + "grad_norm": 3.5305643795022155, + "learning_rate": 3.9286208123572336e-07, + "loss": 0.5629, + "step": 18693 + }, + { + "epoch": 0.7713955599570851, + "grad_norm": 3.9559647938400793, + "learning_rate": 3.927268299353313e-07, + "loss": 0.5249, + "step": 18694 + }, + { + "epoch": 0.771436824296443, + "grad_norm": 4.188369133166613, + "learning_rate": 3.925915984136085e-07, + "loss": 0.5089, + "step": 18695 + }, + { + "epoch": 0.7714780886358009, + "grad_norm": 3.322432091595873, + "learning_rate": 3.924563866729697e-07, + "loss": 0.4888, + "step": 18696 + }, + { + "epoch": 0.7715193529751588, + "grad_norm": 4.251018758600991, + "learning_rate": 3.923211947158305e-07, + "loss": 0.49, + "step": 18697 + }, + { + "epoch": 0.7715606173145168, + "grad_norm": 3.291644614740896, + "learning_rate": 3.9218602254460565e-07, + "loss": 0.544, + "step": 18698 + }, + { + "epoch": 0.7716018816538747, + "grad_norm": 1.8588773416474158, + "learning_rate": 3.9205087016171034e-07, + "loss": 0.4869, + "step": 18699 + }, + { + "epoch": 0.7716431459932327, + "grad_norm": 2.032037857381618, + "learning_rate": 3.919157375695575e-07, + "loss": 0.5616, + "step": 18700 + }, + { + "epoch": 0.7716844103325906, + "grad_norm": 1.8940441987475636, + "learning_rate": 3.917806247705618e-07, + "loss": 0.4725, + "step": 18701 + }, + { + "epoch": 0.7717256746719485, + "grad_norm": 2.5075831660302375, + "learning_rate": 3.916455317671368e-07, + "loss": 0.508, + "step": 18702 + }, + { + "epoch": 0.7717669390113064, + "grad_norm": 2.6704523647369376, + "learning_rate": 3.915104585616949e-07, + "loss": 0.5545, + "step": 18703 + }, + { + "epoch": 0.7718082033506644, + "grad_norm": 4.041453930626788, + "learning_rate": 3.913754051566491e-07, + "loss": 0.531, + "step": 18704 + }, + { + "epoch": 0.7718494676900223, + "grad_norm": 5.017880803521712, + "learning_rate": 3.9124037155441205e-07, + "loss": 0.5311, + "step": 18705 + }, + { + "epoch": 0.7718907320293802, + "grad_norm": 2.8993181414000033, + "learning_rate": 3.9110535775739617e-07, + "loss": 0.5054, + "step": 18706 + }, + { + "epoch": 0.7719319963687381, + "grad_norm": 3.0422648632497045, + "learning_rate": 3.909703637680124e-07, + "loss": 0.489, + "step": 18707 + }, + { + "epoch": 0.771973260708096, + "grad_norm": 27.384469298518958, + "learning_rate": 3.90835389588672e-07, + "loss": 0.4985, + "step": 18708 + }, + { + "epoch": 0.7720145250474539, + "grad_norm": 2.3299875021231298, + "learning_rate": 3.907004352217862e-07, + "loss": 0.488, + "step": 18709 + }, + { + "epoch": 0.772055789386812, + "grad_norm": 2.650333833081302, + "learning_rate": 3.905655006697657e-07, + "loss": 0.5497, + "step": 18710 + }, + { + "epoch": 0.7720970537261699, + "grad_norm": 6.745961936433225, + "learning_rate": 3.9043058593502134e-07, + "loss": 0.5624, + "step": 18711 + }, + { + "epoch": 0.7721383180655278, + "grad_norm": 3.464102694299727, + "learning_rate": 
3.902956910199617e-07, + "loss": 0.49, + "step": 18712 + }, + { + "epoch": 0.7721795824048857, + "grad_norm": 2.832371291296219, + "learning_rate": 3.901608159269972e-07, + "loss": 0.4599, + "step": 18713 + }, + { + "epoch": 0.7722208467442436, + "grad_norm": 2.649202920519378, + "learning_rate": 3.900259606585369e-07, + "loss": 0.52, + "step": 18714 + }, + { + "epoch": 0.7722621110836015, + "grad_norm": 2.3671312863884206, + "learning_rate": 3.898911252169902e-07, + "loss": 0.5041, + "step": 18715 + }, + { + "epoch": 0.7723033754229595, + "grad_norm": 2.1342116652548295, + "learning_rate": 3.8975630960476455e-07, + "loss": 0.5037, + "step": 18716 + }, + { + "epoch": 0.7723446397623174, + "grad_norm": 3.0817450821548027, + "learning_rate": 3.896215138242686e-07, + "loss": 0.5916, + "step": 18717 + }, + { + "epoch": 0.7723859041016753, + "grad_norm": 4.141410956298082, + "learning_rate": 3.894867378779102e-07, + "loss": 0.4976, + "step": 18718 + }, + { + "epoch": 0.7724271684410332, + "grad_norm": 5.772819133418717, + "learning_rate": 3.893519817680972e-07, + "loss": 0.5274, + "step": 18719 + }, + { + "epoch": 0.7724684327803912, + "grad_norm": 2.9431824468206518, + "learning_rate": 3.892172454972362e-07, + "loss": 0.5524, + "step": 18720 + }, + { + "epoch": 0.7725096971197492, + "grad_norm": 4.854021383305337, + "learning_rate": 3.890825290677336e-07, + "loss": 0.5308, + "step": 18721 + }, + { + "epoch": 0.7725509614591071, + "grad_norm": 2.359689008827216, + "learning_rate": 3.889478324819959e-07, + "loss": 0.492, + "step": 18722 + }, + { + "epoch": 0.772592225798465, + "grad_norm": 3.3859391655104094, + "learning_rate": 3.8881315574242995e-07, + "loss": 0.5342, + "step": 18723 + }, + { + "epoch": 0.7726334901378229, + "grad_norm": 3.493802383357522, + "learning_rate": 3.8867849885144023e-07, + "loss": 0.4979, + "step": 18724 + }, + { + "epoch": 0.7726747544771808, + "grad_norm": 3.3014759227874815, + "learning_rate": 3.885438618114328e-07, + "loss": 0.5295, + "step": 18725 + }, + { + "epoch": 0.7727160188165387, + "grad_norm": 2.917899937040086, + "learning_rate": 3.8840924462481235e-07, + "loss": 0.4775, + "step": 18726 + }, + { + "epoch": 0.7727572831558966, + "grad_norm": 2.8934022687909606, + "learning_rate": 3.88274647293984e-07, + "loss": 0.5298, + "step": 18727 + }, + { + "epoch": 0.7727985474952546, + "grad_norm": 3.8138539965055998, + "learning_rate": 3.8814006982135103e-07, + "loss": 0.4797, + "step": 18728 + }, + { + "epoch": 0.7728398118346125, + "grad_norm": 3.108088741193714, + "learning_rate": 3.880055122093179e-07, + "loss": 0.5068, + "step": 18729 + }, + { + "epoch": 0.7728810761739705, + "grad_norm": 2.900971069162769, + "learning_rate": 3.8787097446028816e-07, + "loss": 0.4818, + "step": 18730 + }, + { + "epoch": 0.7729223405133284, + "grad_norm": 24.19180889708685, + "learning_rate": 3.8773645657666544e-07, + "loss": 0.5426, + "step": 18731 + }, + { + "epoch": 0.7729636048526863, + "grad_norm": 3.074429605453299, + "learning_rate": 3.8760195856085144e-07, + "loss": 0.4943, + "step": 18732 + }, + { + "epoch": 0.7730048691920443, + "grad_norm": 12.230101229724369, + "learning_rate": 3.8746748041524994e-07, + "loss": 0.532, + "step": 18733 + }, + { + "epoch": 0.7730461335314022, + "grad_norm": 4.951706766349979, + "learning_rate": 3.873330221422618e-07, + "loss": 0.5112, + "step": 18734 + }, + { + "epoch": 0.7730873978707601, + "grad_norm": 3.0530719557414705, + "learning_rate": 3.871985837442893e-07, + "loss": 0.5089, + "step": 18735 + }, + { + "epoch": 
0.773128662210118, + "grad_norm": 2.5197725229191077, + "learning_rate": 3.870641652237343e-07, + "loss": 0.4944, + "step": 18736 + }, + { + "epoch": 0.7731699265494759, + "grad_norm": 3.2847051459745833, + "learning_rate": 3.8692976658299714e-07, + "loss": 0.5161, + "step": 18737 + }, + { + "epoch": 0.7732111908888338, + "grad_norm": 11.143118297721825, + "learning_rate": 3.8679538782447873e-07, + "loss": 0.4837, + "step": 18738 + }, + { + "epoch": 0.7732524552281917, + "grad_norm": 5.018688552092978, + "learning_rate": 3.8666102895058004e-07, + "loss": 0.4646, + "step": 18739 + }, + { + "epoch": 0.7732937195675498, + "grad_norm": 3.2230919185563254, + "learning_rate": 3.8652668996370005e-07, + "loss": 0.4833, + "step": 18740 + }, + { + "epoch": 0.7733349839069077, + "grad_norm": 13.9743972198382, + "learning_rate": 3.8639237086623887e-07, + "loss": 0.5554, + "step": 18741 + }, + { + "epoch": 0.7733762482462656, + "grad_norm": 3.8758227603962827, + "learning_rate": 3.8625807166059566e-07, + "loss": 0.5295, + "step": 18742 + }, + { + "epoch": 0.7734175125856235, + "grad_norm": 3.010066814901284, + "learning_rate": 3.8612379234916997e-07, + "loss": 0.5199, + "step": 18743 + }, + { + "epoch": 0.7734587769249814, + "grad_norm": 4.106104596988959, + "learning_rate": 3.859895329343594e-07, + "loss": 0.4944, + "step": 18744 + }, + { + "epoch": 0.7735000412643394, + "grad_norm": 3.019581952864375, + "learning_rate": 3.858552934185626e-07, + "loss": 0.4729, + "step": 18745 + }, + { + "epoch": 0.7735413056036973, + "grad_norm": 2.0681305171627873, + "learning_rate": 3.8572107380417786e-07, + "loss": 0.473, + "step": 18746 + }, + { + "epoch": 0.7735825699430552, + "grad_norm": 6.110969259485333, + "learning_rate": 3.8558687409360174e-07, + "loss": 0.4848, + "step": 18747 + }, + { + "epoch": 0.7736238342824131, + "grad_norm": 3.867147715356536, + "learning_rate": 3.854526942892324e-07, + "loss": 0.5055, + "step": 18748 + }, + { + "epoch": 0.773665098621771, + "grad_norm": 2.861669694498423, + "learning_rate": 3.853185343934657e-07, + "loss": 0.4643, + "step": 18749 + }, + { + "epoch": 0.773706362961129, + "grad_norm": 77.01887802439717, + "learning_rate": 3.8518439440869834e-07, + "loss": 0.4877, + "step": 18750 + }, + { + "epoch": 0.773747627300487, + "grad_norm": 9.367938036492777, + "learning_rate": 3.850502743373266e-07, + "loss": 0.574, + "step": 18751 + }, + { + "epoch": 0.7737888916398449, + "grad_norm": 2.8227803987857945, + "learning_rate": 3.8491617418174667e-07, + "loss": 0.4797, + "step": 18752 + }, + { + "epoch": 0.7738301559792028, + "grad_norm": 4.7433133206493086, + "learning_rate": 3.8478209394435285e-07, + "loss": 0.512, + "step": 18753 + }, + { + "epoch": 0.7738714203185607, + "grad_norm": 2.5730109959804777, + "learning_rate": 3.846480336275408e-07, + "loss": 0.5065, + "step": 18754 + }, + { + "epoch": 0.7739126846579186, + "grad_norm": 4.41922015154417, + "learning_rate": 3.845139932337049e-07, + "loss": 0.5345, + "step": 18755 + }, + { + "epoch": 0.7739539489972765, + "grad_norm": 2.9082900974058328, + "learning_rate": 3.8437997276524007e-07, + "loss": 0.4883, + "step": 18756 + }, + { + "epoch": 0.7739952133366345, + "grad_norm": 2.8653355785571244, + "learning_rate": 3.8424597222453934e-07, + "loss": 0.5026, + "step": 18757 + }, + { + "epoch": 0.7740364776759924, + "grad_norm": 9.626478536595439, + "learning_rate": 3.841119916139968e-07, + "loss": 0.4843, + "step": 18758 + }, + { + "epoch": 0.7740777420153503, + "grad_norm": 2.7664391642035784, + "learning_rate": 
3.83978030936006e-07, + "loss": 0.5389, + "step": 18759 + }, + { + "epoch": 0.7741190063547082, + "grad_norm": 2.4627770225408434, + "learning_rate": 3.8384409019295944e-07, + "loss": 0.5495, + "step": 18760 + }, + { + "epoch": 0.7741602706940662, + "grad_norm": 4.14368508006878, + "learning_rate": 3.837101693872492e-07, + "loss": 0.552, + "step": 18761 + }, + { + "epoch": 0.7742015350334241, + "grad_norm": 1.888603922156152, + "learning_rate": 3.835762685212679e-07, + "loss": 0.5378, + "step": 18762 + }, + { + "epoch": 0.7742427993727821, + "grad_norm": 3.7739061720992195, + "learning_rate": 3.834423875974071e-07, + "loss": 0.5195, + "step": 18763 + }, + { + "epoch": 0.77428406371214, + "grad_norm": 2.1982307674289614, + "learning_rate": 3.833085266180592e-07, + "loss": 0.4964, + "step": 18764 + }, + { + "epoch": 0.7743253280514979, + "grad_norm": 5.242484571971122, + "learning_rate": 3.831746855856139e-07, + "loss": 0.5173, + "step": 18765 + }, + { + "epoch": 0.7743665923908558, + "grad_norm": 2.6478918802580678, + "learning_rate": 3.830408645024627e-07, + "loss": 0.5401, + "step": 18766 + }, + { + "epoch": 0.7744078567302137, + "grad_norm": 3.1886305410499887, + "learning_rate": 3.8290706337099574e-07, + "loss": 0.4078, + "step": 18767 + }, + { + "epoch": 0.7744491210695716, + "grad_norm": 4.018354964773464, + "learning_rate": 3.827732821936037e-07, + "loss": 0.5701, + "step": 18768 + }, + { + "epoch": 0.7744903854089296, + "grad_norm": 3.3194022770331726, + "learning_rate": 3.826395209726753e-07, + "loss": 0.5236, + "step": 18769 + }, + { + "epoch": 0.7745316497482875, + "grad_norm": 4.380194282581732, + "learning_rate": 3.825057797106001e-07, + "loss": 0.4969, + "step": 18770 + }, + { + "epoch": 0.7745729140876455, + "grad_norm": 3.529691061849499, + "learning_rate": 3.823720584097675e-07, + "loss": 0.4549, + "step": 18771 + }, + { + "epoch": 0.7746141784270034, + "grad_norm": 4.396630436266918, + "learning_rate": 3.8223835707256604e-07, + "loss": 0.5754, + "step": 18772 + }, + { + "epoch": 0.7746554427663613, + "grad_norm": 5.38647810793789, + "learning_rate": 3.821046757013838e-07, + "loss": 0.5064, + "step": 18773 + }, + { + "epoch": 0.7746967071057193, + "grad_norm": 2.8956248282796127, + "learning_rate": 3.819710142986081e-07, + "loss": 0.4941, + "step": 18774 + }, + { + "epoch": 0.7747379714450772, + "grad_norm": 3.0596129332524535, + "learning_rate": 3.81837372866627e-07, + "loss": 0.4836, + "step": 18775 + }, + { + "epoch": 0.7747792357844351, + "grad_norm": 2.9268948088308564, + "learning_rate": 3.8170375140782766e-07, + "loss": 0.5487, + "step": 18776 + }, + { + "epoch": 0.774820500123793, + "grad_norm": 7.264470859679015, + "learning_rate": 3.8157014992459735e-07, + "loss": 0.5017, + "step": 18777 + }, + { + "epoch": 0.7748617644631509, + "grad_norm": 9.669284698612703, + "learning_rate": 3.8143656841932143e-07, + "loss": 0.5233, + "step": 18778 + }, + { + "epoch": 0.7749030288025088, + "grad_norm": 5.562257656930067, + "learning_rate": 3.813030068943868e-07, + "loss": 0.5265, + "step": 18779 + }, + { + "epoch": 0.7749442931418667, + "grad_norm": 3.586058465868867, + "learning_rate": 3.811694653521793e-07, + "loss": 0.4851, + "step": 18780 + }, + { + "epoch": 0.7749855574812248, + "grad_norm": 2.9723706312892104, + "learning_rate": 3.8103594379508365e-07, + "loss": 0.4885, + "step": 18781 + }, + { + "epoch": 0.7750268218205827, + "grad_norm": 6.732452666696181, + "learning_rate": 3.8090244222548535e-07, + "loss": 0.5525, + "step": 18782 + }, + { + "epoch": 
0.7750680861599406, + "grad_norm": 6.132448557121428, + "learning_rate": 3.80768960645769e-07, + "loss": 0.4719, + "step": 18783 + }, + { + "epoch": 0.7751093504992985, + "grad_norm": 2.6672917372012255, + "learning_rate": 3.8063549905831915e-07, + "loss": 0.437, + "step": 18784 + }, + { + "epoch": 0.7751506148386564, + "grad_norm": 13.292586820126216, + "learning_rate": 3.8050205746551967e-07, + "loss": 0.5174, + "step": 18785 + }, + { + "epoch": 0.7751918791780144, + "grad_norm": 2.4156188683796156, + "learning_rate": 3.803686358697535e-07, + "loss": 0.5218, + "step": 18786 + }, + { + "epoch": 0.7752331435173723, + "grad_norm": 3.0778767882992315, + "learning_rate": 3.8023523427340433e-07, + "loss": 0.5533, + "step": 18787 + }, + { + "epoch": 0.7752744078567302, + "grad_norm": 3.8744448421070503, + "learning_rate": 3.8010185267885526e-07, + "loss": 0.5236, + "step": 18788 + }, + { + "epoch": 0.7753156721960881, + "grad_norm": 4.100753306776609, + "learning_rate": 3.79968491088489e-07, + "loss": 0.5023, + "step": 18789 + }, + { + "epoch": 0.775356936535446, + "grad_norm": 2.268208085705519, + "learning_rate": 3.798351495046868e-07, + "loss": 0.5215, + "step": 18790 + }, + { + "epoch": 0.775398200874804, + "grad_norm": 2.190254727223581, + "learning_rate": 3.797018279298313e-07, + "loss": 0.4378, + "step": 18791 + }, + { + "epoch": 0.775439465214162, + "grad_norm": 2.3381916607973934, + "learning_rate": 3.795685263663036e-07, + "loss": 0.5018, + "step": 18792 + }, + { + "epoch": 0.7754807295535199, + "grad_norm": 2.993334403479601, + "learning_rate": 3.7943524481648536e-07, + "loss": 0.5498, + "step": 18793 + }, + { + "epoch": 0.7755219938928778, + "grad_norm": 2.830727946529583, + "learning_rate": 3.793019832827564e-07, + "loss": 0.5154, + "step": 18794 + }, + { + "epoch": 0.7755632582322357, + "grad_norm": 7.473829518379394, + "learning_rate": 3.791687417674977e-07, + "loss": 0.5089, + "step": 18795 + }, + { + "epoch": 0.7756045225715936, + "grad_norm": 33.61101890565701, + "learning_rate": 3.790355202730889e-07, + "loss": 0.5487, + "step": 18796 + }, + { + "epoch": 0.7756457869109515, + "grad_norm": 3.539652732631884, + "learning_rate": 3.789023188019106e-07, + "loss": 0.5873, + "step": 18797 + }, + { + "epoch": 0.7756870512503095, + "grad_norm": 9.807327619977904, + "learning_rate": 3.787691373563414e-07, + "loss": 0.5249, + "step": 18798 + }, + { + "epoch": 0.7757283155896674, + "grad_norm": 2.582827012214336, + "learning_rate": 3.786359759387598e-07, + "loss": 0.5701, + "step": 18799 + }, + { + "epoch": 0.7757695799290253, + "grad_norm": 4.114090586241253, + "learning_rate": 3.7850283455154494e-07, + "loss": 0.4978, + "step": 18800 + }, + { + "epoch": 0.7758108442683833, + "grad_norm": 2.1111554384263522, + "learning_rate": 3.783697131970754e-07, + "loss": 0.5204, + "step": 18801 + }, + { + "epoch": 0.7758521086077412, + "grad_norm": 7.616066376026102, + "learning_rate": 3.7823661187772833e-07, + "loss": 0.4682, + "step": 18802 + }, + { + "epoch": 0.7758933729470991, + "grad_norm": 5.629732495896099, + "learning_rate": 3.781035305958815e-07, + "loss": 0.5282, + "step": 18803 + }, + { + "epoch": 0.7759346372864571, + "grad_norm": 3.4302274421862107, + "learning_rate": 3.779704693539122e-07, + "loss": 0.5313, + "step": 18804 + }, + { + "epoch": 0.775975901625815, + "grad_norm": 8.566305912307861, + "learning_rate": 3.778374281541976e-07, + "loss": 0.5108, + "step": 18805 + }, + { + "epoch": 0.7760171659651729, + "grad_norm": 2.6472095631404766, + "learning_rate": 
3.7770440699911337e-07, + "loss": 0.4649, + "step": 18806 + }, + { + "epoch": 0.7760584303045308, + "grad_norm": 4.019370687024561, + "learning_rate": 3.7757140589103596e-07, + "loss": 0.5227, + "step": 18807 + }, + { + "epoch": 0.7760996946438887, + "grad_norm": 6.2935775848039155, + "learning_rate": 3.774384248323412e-07, + "loss": 0.5281, + "step": 18808 + }, + { + "epoch": 0.7761409589832466, + "grad_norm": 4.697417864448799, + "learning_rate": 3.773054638254047e-07, + "loss": 0.5151, + "step": 18809 + }, + { + "epoch": 0.7761822233226046, + "grad_norm": 4.740577351894625, + "learning_rate": 3.771725228726009e-07, + "loss": 0.5298, + "step": 18810 + }, + { + "epoch": 0.7762234876619626, + "grad_norm": 5.991076049789108, + "learning_rate": 3.7703960197630455e-07, + "loss": 0.5502, + "step": 18811 + }, + { + "epoch": 0.7762647520013205, + "grad_norm": 2.7408925647691413, + "learning_rate": 3.7690670113889075e-07, + "loss": 0.5124, + "step": 18812 + }, + { + "epoch": 0.7763060163406784, + "grad_norm": 7.670630569779063, + "learning_rate": 3.7677382036273227e-07, + "loss": 0.5167, + "step": 18813 + }, + { + "epoch": 0.7763472806800363, + "grad_norm": 3.075907829283578, + "learning_rate": 3.7664095965020356e-07, + "loss": 0.4859, + "step": 18814 + }, + { + "epoch": 0.7763885450193942, + "grad_norm": 7.1263175532550145, + "learning_rate": 3.765081190036772e-07, + "loss": 0.5006, + "step": 18815 + }, + { + "epoch": 0.7764298093587522, + "grad_norm": 3.8904464759371766, + "learning_rate": 3.7637529842552646e-07, + "loss": 0.5076, + "step": 18816 + }, + { + "epoch": 0.7764710736981101, + "grad_norm": 3.1156700591303257, + "learning_rate": 3.7624249791812415e-07, + "loss": 0.526, + "step": 18817 + }, + { + "epoch": 0.776512338037468, + "grad_norm": 11.152112651713335, + "learning_rate": 3.761097174838415e-07, + "loss": 0.5407, + "step": 18818 + }, + { + "epoch": 0.7765536023768259, + "grad_norm": 2.568236120050429, + "learning_rate": 3.7597695712505095e-07, + "loss": 0.4582, + "step": 18819 + }, + { + "epoch": 0.7765948667161838, + "grad_norm": 2.2719388857637703, + "learning_rate": 3.758442168441237e-07, + "loss": 0.481, + "step": 18820 + }, + { + "epoch": 0.7766361310555419, + "grad_norm": 3.6938290672821315, + "learning_rate": 3.757114966434315e-07, + "loss": 0.5062, + "step": 18821 + }, + { + "epoch": 0.7766773953948998, + "grad_norm": 5.8465837303438555, + "learning_rate": 3.7557879652534403e-07, + "loss": 0.5441, + "step": 18822 + }, + { + "epoch": 0.7767186597342577, + "grad_norm": 3.0463024501536236, + "learning_rate": 3.7544611649223227e-07, + "loss": 0.5224, + "step": 18823 + }, + { + "epoch": 0.7767599240736156, + "grad_norm": 11.467463655806169, + "learning_rate": 3.7531345654646593e-07, + "loss": 0.543, + "step": 18824 + }, + { + "epoch": 0.7768011884129735, + "grad_norm": 2.7139871047811828, + "learning_rate": 3.751808166904152e-07, + "loss": 0.5047, + "step": 18825 + }, + { + "epoch": 0.7768424527523314, + "grad_norm": 2.492013324637166, + "learning_rate": 3.7504819692644904e-07, + "loss": 0.4822, + "step": 18826 + }, + { + "epoch": 0.7768837170916894, + "grad_norm": 3.9112731397578213, + "learning_rate": 3.749155972569357e-07, + "loss": 0.5417, + "step": 18827 + }, + { + "epoch": 0.7769249814310473, + "grad_norm": 2.865824437043602, + "learning_rate": 3.747830176842444e-07, + "loss": 0.5159, + "step": 18828 + }, + { + "epoch": 0.7769662457704052, + "grad_norm": 4.710230675876246, + "learning_rate": 3.746504582107434e-07, + "loss": 0.5368, + "step": 18829 + }, + { + "epoch": 
0.7770075101097631, + "grad_norm": 2.28471232063143, + "learning_rate": 3.7451791883880065e-07, + "loss": 0.4938, + "step": 18830 + }, + { + "epoch": 0.777048774449121, + "grad_norm": 3.4280933745713216, + "learning_rate": 3.743853995707831e-07, + "loss": 0.5495, + "step": 18831 + }, + { + "epoch": 0.777090038788479, + "grad_norm": 2.7067715432791704, + "learning_rate": 3.7425290040905804e-07, + "loss": 0.4995, + "step": 18832 + }, + { + "epoch": 0.777131303127837, + "grad_norm": 2.55502610391658, + "learning_rate": 3.741204213559925e-07, + "loss": 0.4889, + "step": 18833 + }, + { + "epoch": 0.7771725674671949, + "grad_norm": 2.623941721801111, + "learning_rate": 3.73987962413953e-07, + "loss": 0.5207, + "step": 18834 + }, + { + "epoch": 0.7772138318065528, + "grad_norm": 2.6838012181719746, + "learning_rate": 3.738555235853051e-07, + "loss": 0.5382, + "step": 18835 + }, + { + "epoch": 0.7772550961459107, + "grad_norm": 2.6983443240019103, + "learning_rate": 3.7372310487241463e-07, + "loss": 0.5211, + "step": 18836 + }, + { + "epoch": 0.7772963604852686, + "grad_norm": 3.9493567817608333, + "learning_rate": 3.7359070627764715e-07, + "loss": 0.5392, + "step": 18837 + }, + { + "epoch": 0.7773376248246265, + "grad_norm": 2.5612481781373604, + "learning_rate": 3.7345832780336827e-07, + "loss": 0.5059, + "step": 18838 + }, + { + "epoch": 0.7773788891639845, + "grad_norm": 2.884851409854324, + "learning_rate": 3.733259694519409e-07, + "loss": 0.5158, + "step": 18839 + }, + { + "epoch": 0.7774201535033424, + "grad_norm": 4.495479688027256, + "learning_rate": 3.7319363122573027e-07, + "loss": 0.4874, + "step": 18840 + }, + { + "epoch": 0.7774614178427003, + "grad_norm": 4.961678118064405, + "learning_rate": 3.730613131271002e-07, + "loss": 0.5143, + "step": 18841 + }, + { + "epoch": 0.7775026821820583, + "grad_norm": 5.6877893732894105, + "learning_rate": 3.729290151584148e-07, + "loss": 0.5266, + "step": 18842 + }, + { + "epoch": 0.7775439465214162, + "grad_norm": 2.341108528093804, + "learning_rate": 3.7279673732203613e-07, + "loss": 0.5287, + "step": 18843 + }, + { + "epoch": 0.7775852108607741, + "grad_norm": 2.606564525303338, + "learning_rate": 3.726644796203277e-07, + "loss": 0.5572, + "step": 18844 + }, + { + "epoch": 0.7776264752001321, + "grad_norm": 2.628867303920103, + "learning_rate": 3.725322420556516e-07, + "loss": 0.4774, + "step": 18845 + }, + { + "epoch": 0.77766773953949, + "grad_norm": 12.020572100464879, + "learning_rate": 3.724000246303707e-07, + "loss": 0.5234, + "step": 18846 + }, + { + "epoch": 0.7777090038788479, + "grad_norm": 2.7974656602193653, + "learning_rate": 3.722678273468459e-07, + "loss": 0.4877, + "step": 18847 + }, + { + "epoch": 0.7777502682182058, + "grad_norm": 9.833252687996344, + "learning_rate": 3.721356502074386e-07, + "loss": 0.4714, + "step": 18848 + }, + { + "epoch": 0.7777915325575637, + "grad_norm": 3.146551838736795, + "learning_rate": 3.7200349321451016e-07, + "loss": 0.5282, + "step": 18849 + }, + { + "epoch": 0.7778327968969216, + "grad_norm": 7.738047664100206, + "learning_rate": 3.7187135637042165e-07, + "loss": 0.5321, + "step": 18850 + }, + { + "epoch": 0.7778740612362796, + "grad_norm": 2.4464570468697135, + "learning_rate": 3.717392396775328e-07, + "loss": 0.5706, + "step": 18851 + }, + { + "epoch": 0.7779153255756376, + "grad_norm": 2.6725357074742866, + "learning_rate": 3.716071431382033e-07, + "loss": 0.5019, + "step": 18852 + }, + { + "epoch": 0.7779565899149955, + "grad_norm": 16.566444484473912, + "learning_rate": 
3.7147506675479295e-07, + "loss": 0.5017, + "step": 18853 + }, + { + "epoch": 0.7779978542543534, + "grad_norm": 3.8482533462650985, + "learning_rate": 3.7134301052966093e-07, + "loss": 0.4521, + "step": 18854 + }, + { + "epoch": 0.7780391185937113, + "grad_norm": 6.35122250127079, + "learning_rate": 3.712109744651667e-07, + "loss": 0.4666, + "step": 18855 + }, + { + "epoch": 0.7780803829330692, + "grad_norm": 3.614997822225493, + "learning_rate": 3.7107895856366796e-07, + "loss": 0.5445, + "step": 18856 + }, + { + "epoch": 0.7781216472724272, + "grad_norm": 2.905398745897288, + "learning_rate": 3.7094696282752293e-07, + "loss": 0.5001, + "step": 18857 + }, + { + "epoch": 0.7781629116117851, + "grad_norm": 2.3924008665853136, + "learning_rate": 3.7081498725909023e-07, + "loss": 0.5041, + "step": 18858 + }, + { + "epoch": 0.778204175951143, + "grad_norm": 3.4763008166525036, + "learning_rate": 3.7068303186072615e-07, + "loss": 0.4932, + "step": 18859 + }, + { + "epoch": 0.7782454402905009, + "grad_norm": 2.593639646673182, + "learning_rate": 3.7055109663478814e-07, + "loss": 0.5223, + "step": 18860 + }, + { + "epoch": 0.7782867046298588, + "grad_norm": 2.4474473497646567, + "learning_rate": 3.704191815836331e-07, + "loss": 0.4994, + "step": 18861 + }, + { + "epoch": 0.7783279689692169, + "grad_norm": 3.229314082439816, + "learning_rate": 3.7028728670961775e-07, + "loss": 0.4771, + "step": 18862 + }, + { + "epoch": 0.7783692333085748, + "grad_norm": 5.561968998926415, + "learning_rate": 3.7015541201509694e-07, + "loss": 0.5078, + "step": 18863 + }, + { + "epoch": 0.7784104976479327, + "grad_norm": 3.594496162780714, + "learning_rate": 3.700235575024275e-07, + "loss": 0.5655, + "step": 18864 + }, + { + "epoch": 0.7784517619872906, + "grad_norm": 2.3594362660104475, + "learning_rate": 3.6989172317396367e-07, + "loss": 0.5419, + "step": 18865 + }, + { + "epoch": 0.7784930263266485, + "grad_norm": 2.0341053015316843, + "learning_rate": 3.6975990903206066e-07, + "loss": 0.4869, + "step": 18866 + }, + { + "epoch": 0.7785342906660064, + "grad_norm": 19.713436008834467, + "learning_rate": 3.6962811507907365e-07, + "loss": 0.5926, + "step": 18867 + }, + { + "epoch": 0.7785755550053644, + "grad_norm": 4.495862784263001, + "learning_rate": 3.6949634131735575e-07, + "loss": 0.5287, + "step": 18868 + }, + { + "epoch": 0.7786168193447223, + "grad_norm": 4.345435040915648, + "learning_rate": 3.693645877492612e-07, + "loss": 0.5167, + "step": 18869 + }, + { + "epoch": 0.7786580836840802, + "grad_norm": 7.742787552632326, + "learning_rate": 3.6923285437714365e-07, + "loss": 0.5102, + "step": 18870 + }, + { + "epoch": 0.7786993480234381, + "grad_norm": 2.0728992620563043, + "learning_rate": 3.6910114120335646e-07, + "loss": 0.5166, + "step": 18871 + }, + { + "epoch": 0.7787406123627961, + "grad_norm": 3.3853927912846133, + "learning_rate": 3.6896944823025157e-07, + "loss": 0.547, + "step": 18872 + }, + { + "epoch": 0.778781876702154, + "grad_norm": 2.3472763294401133, + "learning_rate": 3.6883777546018177e-07, + "loss": 0.5191, + "step": 18873 + }, + { + "epoch": 0.778823141041512, + "grad_norm": 2.673190532701139, + "learning_rate": 3.68706122895499e-07, + "loss": 0.4946, + "step": 18874 + }, + { + "epoch": 0.7788644053808699, + "grad_norm": 2.085172271577883, + "learning_rate": 3.6857449053855536e-07, + "loss": 0.493, + "step": 18875 + }, + { + "epoch": 0.7789056697202278, + "grad_norm": 3.092182237106173, + "learning_rate": 3.6844287839170117e-07, + "loss": 0.5409, + "step": 18876 + }, + { + "epoch": 
0.7789469340595857, + "grad_norm": 12.864796381261305, + "learning_rate": 3.683112864572885e-07, + "loss": 0.5271, + "step": 18877 + }, + { + "epoch": 0.7789881983989436, + "grad_norm": 6.332294626824586, + "learning_rate": 3.6817971473766673e-07, + "loss": 0.4821, + "step": 18878 + }, + { + "epoch": 0.7790294627383015, + "grad_norm": 6.217447199828617, + "learning_rate": 3.680481632351872e-07, + "loss": 0.4886, + "step": 18879 + }, + { + "epoch": 0.7790707270776595, + "grad_norm": 2.3127554027467196, + "learning_rate": 3.679166319521986e-07, + "loss": 0.5387, + "step": 18880 + }, + { + "epoch": 0.7791119914170174, + "grad_norm": 6.029229570662344, + "learning_rate": 3.6778512089105105e-07, + "loss": 0.5247, + "step": 18881 + }, + { + "epoch": 0.7791532557563754, + "grad_norm": 2.1939324975279106, + "learning_rate": 3.676536300540936e-07, + "loss": 0.4578, + "step": 18882 + }, + { + "epoch": 0.7791945200957333, + "grad_norm": 2.740755721633977, + "learning_rate": 3.6752215944367553e-07, + "loss": 0.5208, + "step": 18883 + }, + { + "epoch": 0.7792357844350912, + "grad_norm": 12.94777254931541, + "learning_rate": 3.673907090621443e-07, + "loss": 0.4811, + "step": 18884 + }, + { + "epoch": 0.7792770487744491, + "grad_norm": 2.9884294423659976, + "learning_rate": 3.672592789118482e-07, + "loss": 0.5088, + "step": 18885 + }, + { + "epoch": 0.7793183131138071, + "grad_norm": 4.9295984964237025, + "learning_rate": 3.6712786899513515e-07, + "loss": 0.4635, + "step": 18886 + }, + { + "epoch": 0.779359577453165, + "grad_norm": 2.2283120178312337, + "learning_rate": 3.6699647931435286e-07, + "loss": 0.461, + "step": 18887 + }, + { + "epoch": 0.7794008417925229, + "grad_norm": 4.520712343424838, + "learning_rate": 3.668651098718473e-07, + "loss": 0.5415, + "step": 18888 + }, + { + "epoch": 0.7794421061318808, + "grad_norm": 2.5619490098071527, + "learning_rate": 3.6673376066996556e-07, + "loss": 0.5133, + "step": 18889 + }, + { + "epoch": 0.7794833704712387, + "grad_norm": 3.2707262412060576, + "learning_rate": 3.666024317110543e-07, + "loss": 0.4944, + "step": 18890 + }, + { + "epoch": 0.7795246348105966, + "grad_norm": 10.543832940064235, + "learning_rate": 3.664711229974586e-07, + "loss": 0.5251, + "step": 18891 + }, + { + "epoch": 0.7795658991499546, + "grad_norm": 6.787243503252737, + "learning_rate": 3.663398345315246e-07, + "loss": 0.5126, + "step": 18892 + }, + { + "epoch": 0.7796071634893126, + "grad_norm": 3.2846777169600583, + "learning_rate": 3.662085663155967e-07, + "loss": 0.5079, + "step": 18893 + }, + { + "epoch": 0.7796484278286705, + "grad_norm": 5.6154153669327265, + "learning_rate": 3.660773183520202e-07, + "loss": 0.4889, + "step": 18894 + }, + { + "epoch": 0.7796896921680284, + "grad_norm": 3.3978569247201795, + "learning_rate": 3.6594609064313975e-07, + "loss": 0.5117, + "step": 18895 + }, + { + "epoch": 0.7797309565073863, + "grad_norm": 24.995159707678493, + "learning_rate": 3.6581488319129873e-07, + "loss": 0.5165, + "step": 18896 + }, + { + "epoch": 0.7797722208467442, + "grad_norm": 3.978857418384945, + "learning_rate": 3.656836959988412e-07, + "loss": 0.4973, + "step": 18897 + }, + { + "epoch": 0.7798134851861022, + "grad_norm": 2.9450624151788376, + "learning_rate": 3.655525290681105e-07, + "loss": 0.5075, + "step": 18898 + }, + { + "epoch": 0.7798547495254601, + "grad_norm": 4.159396797862135, + "learning_rate": 3.654213824014498e-07, + "loss": 0.5234, + "step": 18899 + }, + { + "epoch": 0.779896013864818, + "grad_norm": 3.9088536654009416, + "learning_rate": 
3.652902560012014e-07, + "loss": 0.4467, + "step": 18900 + }, + { + "epoch": 0.7799372782041759, + "grad_norm": 4.062279130702412, + "learning_rate": 3.6515914986970734e-07, + "loss": 0.4814, + "step": 18901 + }, + { + "epoch": 0.7799785425435338, + "grad_norm": 3.17350877306471, + "learning_rate": 3.6502806400931006e-07, + "loss": 0.572, + "step": 18902 + }, + { + "epoch": 0.7800198068828919, + "grad_norm": 2.0347733101689633, + "learning_rate": 3.64896998422351e-07, + "loss": 0.5184, + "step": 18903 + }, + { + "epoch": 0.7800610712222498, + "grad_norm": 3.003722317534732, + "learning_rate": 3.6476595311117137e-07, + "loss": 0.4994, + "step": 18904 + }, + { + "epoch": 0.7801023355616077, + "grad_norm": 3.1368903173203964, + "learning_rate": 3.646349280781111e-07, + "loss": 0.558, + "step": 18905 + }, + { + "epoch": 0.7801435999009656, + "grad_norm": 2.8117961883573908, + "learning_rate": 3.645039233255115e-07, + "loss": 0.4891, + "step": 18906 + }, + { + "epoch": 0.7801848642403235, + "grad_norm": 15.01658324457904, + "learning_rate": 3.6437293885571227e-07, + "loss": 0.5219, + "step": 18907 + }, + { + "epoch": 0.7802261285796814, + "grad_norm": 2.3474933983818542, + "learning_rate": 3.6424197467105365e-07, + "loss": 0.4716, + "step": 18908 + }, + { + "epoch": 0.7802673929190393, + "grad_norm": 3.3658459439122024, + "learning_rate": 3.641110307738742e-07, + "loss": 0.4803, + "step": 18909 + }, + { + "epoch": 0.7803086572583973, + "grad_norm": 2.7226623896889315, + "learning_rate": 3.6398010716651326e-07, + "loss": 0.5065, + "step": 18910 + }, + { + "epoch": 0.7803499215977552, + "grad_norm": 6.245953377153124, + "learning_rate": 3.638492038513096e-07, + "loss": 0.5102, + "step": 18911 + }, + { + "epoch": 0.7803911859371131, + "grad_norm": 3.9112187996970498, + "learning_rate": 3.637183208306017e-07, + "loss": 0.4849, + "step": 18912 + }, + { + "epoch": 0.7804324502764711, + "grad_norm": 4.836372449353536, + "learning_rate": 3.635874581067268e-07, + "loss": 0.5128, + "step": 18913 + }, + { + "epoch": 0.780473714615829, + "grad_norm": 2.1463893570842663, + "learning_rate": 3.634566156820227e-07, + "loss": 0.4942, + "step": 18914 + }, + { + "epoch": 0.780514978955187, + "grad_norm": 3.6702481233146496, + "learning_rate": 3.633257935588266e-07, + "loss": 0.4956, + "step": 18915 + }, + { + "epoch": 0.7805562432945449, + "grad_norm": 8.013396058316676, + "learning_rate": 3.6319499173947587e-07, + "loss": 0.5107, + "step": 18916 + }, + { + "epoch": 0.7805975076339028, + "grad_norm": 5.49037220923483, + "learning_rate": 3.6306421022630634e-07, + "loss": 0.5125, + "step": 18917 + }, + { + "epoch": 0.7806387719732607, + "grad_norm": 3.7226268467171226, + "learning_rate": 3.629334490216538e-07, + "loss": 0.4679, + "step": 18918 + }, + { + "epoch": 0.7806800363126186, + "grad_norm": 2.624143024772643, + "learning_rate": 3.628027081278545e-07, + "loss": 0.5006, + "step": 18919 + }, + { + "epoch": 0.7807213006519765, + "grad_norm": 2.954943416352947, + "learning_rate": 3.6267198754724397e-07, + "loss": 0.5255, + "step": 18920 + }, + { + "epoch": 0.7807625649913345, + "grad_norm": 2.712386332030767, + "learning_rate": 3.625412872821565e-07, + "loss": 0.5238, + "step": 18921 + }, + { + "epoch": 0.7808038293306924, + "grad_norm": 2.392730004469395, + "learning_rate": 3.62410607334927e-07, + "loss": 0.5202, + "step": 18922 + }, + { + "epoch": 0.7808450936700504, + "grad_norm": 2.7324926928477877, + "learning_rate": 3.6227994770788995e-07, + "loss": 0.4657, + "step": 18923 + }, + { + "epoch": 
0.7808863580094083, + "grad_norm": 2.8643398220498333, + "learning_rate": 3.621493084033796e-07, + "loss": 0.5386, + "step": 18924 + }, + { + "epoch": 0.7809276223487662, + "grad_norm": 2.6841229885377946, + "learning_rate": 3.620186894237287e-07, + "loss": 0.5295, + "step": 18925 + }, + { + "epoch": 0.7809688866881241, + "grad_norm": 3.9296364114449602, + "learning_rate": 3.618880907712708e-07, + "loss": 0.4528, + "step": 18926 + }, + { + "epoch": 0.7810101510274821, + "grad_norm": 3.639992620473409, + "learning_rate": 3.6175751244833873e-07, + "loss": 0.4749, + "step": 18927 + }, + { + "epoch": 0.78105141536684, + "grad_norm": 1.85135825620347, + "learning_rate": 3.6162695445726534e-07, + "loss": 0.5095, + "step": 18928 + }, + { + "epoch": 0.7810926797061979, + "grad_norm": 5.987164542636084, + "learning_rate": 3.6149641680038184e-07, + "loss": 0.5434, + "step": 18929 + }, + { + "epoch": 0.7811339440455558, + "grad_norm": 2.311606487145096, + "learning_rate": 3.613658994800209e-07, + "loss": 0.4955, + "step": 18930 + }, + { + "epoch": 0.7811752083849137, + "grad_norm": 2.7059005620954992, + "learning_rate": 3.6123540249851295e-07, + "loss": 0.5323, + "step": 18931 + }, + { + "epoch": 0.7812164727242716, + "grad_norm": 2.0330049408647217, + "learning_rate": 3.6110492585818953e-07, + "loss": 0.4891, + "step": 18932 + }, + { + "epoch": 0.7812577370636297, + "grad_norm": 4.919116711611015, + "learning_rate": 3.609744695613815e-07, + "loss": 0.5168, + "step": 18933 + }, + { + "epoch": 0.7812990014029876, + "grad_norm": 2.665628840550337, + "learning_rate": 3.608440336104187e-07, + "loss": 0.5102, + "step": 18934 + }, + { + "epoch": 0.7813402657423455, + "grad_norm": 2.8679752348316656, + "learning_rate": 3.6071361800763097e-07, + "loss": 0.5516, + "step": 18935 + }, + { + "epoch": 0.7813815300817034, + "grad_norm": 3.0137133541638157, + "learning_rate": 3.6058322275534857e-07, + "loss": 0.5121, + "step": 18936 + }, + { + "epoch": 0.7814227944210613, + "grad_norm": 3.3874302369605536, + "learning_rate": 3.604528478558997e-07, + "loss": 0.5188, + "step": 18937 + }, + { + "epoch": 0.7814640587604192, + "grad_norm": 6.502117057189403, + "learning_rate": 3.6032249331161376e-07, + "loss": 0.5232, + "step": 18938 + }, + { + "epoch": 0.7815053230997772, + "grad_norm": 3.5130987489291416, + "learning_rate": 3.6019215912481903e-07, + "loss": 0.4694, + "step": 18939 + }, + { + "epoch": 0.7815465874391351, + "grad_norm": 2.896635702167942, + "learning_rate": 3.6006184529784424e-07, + "loss": 0.5206, + "step": 18940 + }, + { + "epoch": 0.781587851778493, + "grad_norm": 2.7908021907335145, + "learning_rate": 3.599315518330161e-07, + "loss": 0.576, + "step": 18941 + }, + { + "epoch": 0.7816291161178509, + "grad_norm": 3.1359722153812855, + "learning_rate": 3.5980127873266244e-07, + "loss": 0.4902, + "step": 18942 + }, + { + "epoch": 0.7816703804572089, + "grad_norm": 4.134761754529316, + "learning_rate": 3.5967102599911083e-07, + "loss": 0.4997, + "step": 18943 + }, + { + "epoch": 0.7817116447965669, + "grad_norm": 9.037842536449242, + "learning_rate": 3.595407936346868e-07, + "loss": 0.5638, + "step": 18944 + }, + { + "epoch": 0.7817529091359248, + "grad_norm": 4.952789601552888, + "learning_rate": 3.5941058164171756e-07, + "loss": 0.4961, + "step": 18945 + }, + { + "epoch": 0.7817941734752827, + "grad_norm": 3.350191581843331, + "learning_rate": 3.592803900225284e-07, + "loss": 0.5467, + "step": 18946 + }, + { + "epoch": 0.7818354378146406, + "grad_norm": 4.588004546275283, + "learning_rate": 
3.59150218779445e-07, + "loss": 0.5662, + "step": 18947 + }, + { + "epoch": 0.7818767021539985, + "grad_norm": 8.098745381043999, + "learning_rate": 3.590200679147927e-07, + "loss": 0.5499, + "step": 18948 + }, + { + "epoch": 0.7819179664933564, + "grad_norm": 2.0878429248802393, + "learning_rate": 3.588899374308968e-07, + "loss": 0.4944, + "step": 18949 + }, + { + "epoch": 0.7819592308327143, + "grad_norm": 14.205443530658332, + "learning_rate": 3.5875982733008067e-07, + "loss": 0.5077, + "step": 18950 + }, + { + "epoch": 0.7820004951720723, + "grad_norm": 2.9480252816522086, + "learning_rate": 3.586297376146691e-07, + "loss": 0.4772, + "step": 18951 + }, + { + "epoch": 0.7820417595114302, + "grad_norm": 4.118807871282958, + "learning_rate": 3.584996682869856e-07, + "loss": 0.4892, + "step": 18952 + }, + { + "epoch": 0.7820830238507881, + "grad_norm": 2.055875185762502, + "learning_rate": 3.5836961934935397e-07, + "loss": 0.4956, + "step": 18953 + }, + { + "epoch": 0.7821242881901461, + "grad_norm": 3.35415685190675, + "learning_rate": 3.582395908040965e-07, + "loss": 0.5206, + "step": 18954 + }, + { + "epoch": 0.782165552529504, + "grad_norm": 3.258658149970804, + "learning_rate": 3.581095826535362e-07, + "loss": 0.5252, + "step": 18955 + }, + { + "epoch": 0.782206816868862, + "grad_norm": 6.281181455326918, + "learning_rate": 3.5797959489999574e-07, + "loss": 0.4865, + "step": 18956 + }, + { + "epoch": 0.7822480812082199, + "grad_norm": 2.500380295018218, + "learning_rate": 3.5784962754579676e-07, + "loss": 0.4836, + "step": 18957 + }, + { + "epoch": 0.7822893455475778, + "grad_norm": 2.113404653533065, + "learning_rate": 3.5771968059326e-07, + "loss": 0.4383, + "step": 18958 + }, + { + "epoch": 0.7823306098869357, + "grad_norm": 2.666205207091416, + "learning_rate": 3.575897540447074e-07, + "loss": 0.4617, + "step": 18959 + }, + { + "epoch": 0.7823718742262936, + "grad_norm": 2.8128732836981714, + "learning_rate": 3.574598479024596e-07, + "loss": 0.4839, + "step": 18960 + }, + { + "epoch": 0.7824131385656515, + "grad_norm": 3.54406298393431, + "learning_rate": 3.573299621688376e-07, + "loss": 0.4873, + "step": 18961 + }, + { + "epoch": 0.7824544029050094, + "grad_norm": 3.19782864337415, + "learning_rate": 3.572000968461606e-07, + "loss": 0.5326, + "step": 18962 + }, + { + "epoch": 0.7824956672443674, + "grad_norm": 2.7652630261823763, + "learning_rate": 3.5707025193674874e-07, + "loss": 0.5277, + "step": 18963 + }, + { + "epoch": 0.7825369315837254, + "grad_norm": 3.308784873038843, + "learning_rate": 3.5694042744292134e-07, + "loss": 0.5377, + "step": 18964 + }, + { + "epoch": 0.7825781959230833, + "grad_norm": 6.01824989532519, + "learning_rate": 3.568106233669978e-07, + "loss": 0.5336, + "step": 18965 + }, + { + "epoch": 0.7826194602624412, + "grad_norm": 3.322279522183025, + "learning_rate": 3.566808397112957e-07, + "loss": 0.5582, + "step": 18966 + }, + { + "epoch": 0.7826607246017991, + "grad_norm": 2.5222522152423834, + "learning_rate": 3.5655107647813436e-07, + "loss": 0.5601, + "step": 18967 + }, + { + "epoch": 0.7827019889411571, + "grad_norm": 5.259944860451809, + "learning_rate": 3.5642133366983093e-07, + "loss": 0.4783, + "step": 18968 + }, + { + "epoch": 0.782743253280515, + "grad_norm": 2.365750971813324, + "learning_rate": 3.562916112887038e-07, + "loss": 0.4805, + "step": 18969 + }, + { + "epoch": 0.7827845176198729, + "grad_norm": 2.9215265926957965, + "learning_rate": 3.5616190933706974e-07, + "loss": 0.5651, + "step": 18970 + }, + { + "epoch": 0.7828257819592308, + 
"grad_norm": 6.699091222548901, + "learning_rate": 3.5603222781724483e-07, + "loss": 0.5267, + "step": 18971 + }, + { + "epoch": 0.7828670462985887, + "grad_norm": 2.252776940553442, + "learning_rate": 3.5590256673154625e-07, + "loss": 0.5268, + "step": 18972 + }, + { + "epoch": 0.7829083106379466, + "grad_norm": 2.385830184035127, + "learning_rate": 3.557729260822899e-07, + "loss": 0.513, + "step": 18973 + }, + { + "epoch": 0.7829495749773047, + "grad_norm": 10.07890868478798, + "learning_rate": 3.556433058717919e-07, + "loss": 0.5185, + "step": 18974 + }, + { + "epoch": 0.7829908393166626, + "grad_norm": 5.473869315409342, + "learning_rate": 3.555137061023667e-07, + "loss": 0.5287, + "step": 18975 + }, + { + "epoch": 0.7830321036560205, + "grad_norm": 2.992646773298471, + "learning_rate": 3.5538412677632987e-07, + "loss": 0.5011, + "step": 18976 + }, + { + "epoch": 0.7830733679953784, + "grad_norm": 2.9601129349896778, + "learning_rate": 3.5525456789599633e-07, + "loss": 0.4957, + "step": 18977 + }, + { + "epoch": 0.7831146323347363, + "grad_norm": 2.4672182948882306, + "learning_rate": 3.551250294636794e-07, + "loss": 0.5116, + "step": 18978 + }, + { + "epoch": 0.7831558966740942, + "grad_norm": 3.7529701281013312, + "learning_rate": 3.549955114816935e-07, + "loss": 0.4918, + "step": 18979 + }, + { + "epoch": 0.7831971610134522, + "grad_norm": 2.123132789919031, + "learning_rate": 3.548660139523522e-07, + "loss": 0.5437, + "step": 18980 + }, + { + "epoch": 0.7832384253528101, + "grad_norm": 6.744513485170957, + "learning_rate": 3.54736536877969e-07, + "loss": 0.5525, + "step": 18981 + }, + { + "epoch": 0.783279689692168, + "grad_norm": 3.6000955986228753, + "learning_rate": 3.546070802608562e-07, + "loss": 0.526, + "step": 18982 + }, + { + "epoch": 0.7833209540315259, + "grad_norm": 4.245807265268106, + "learning_rate": 3.544776441033259e-07, + "loss": 0.5511, + "step": 18983 + }, + { + "epoch": 0.7833622183708839, + "grad_norm": 2.364841814317934, + "learning_rate": 3.543482284076906e-07, + "loss": 0.4778, + "step": 18984 + }, + { + "epoch": 0.7834034827102418, + "grad_norm": 3.814656212070597, + "learning_rate": 3.542188331762618e-07, + "loss": 0.5237, + "step": 18985 + }, + { + "epoch": 0.7834447470495998, + "grad_norm": 4.540851997408965, + "learning_rate": 3.540894584113515e-07, + "loss": 0.5152, + "step": 18986 + }, + { + "epoch": 0.7834860113889577, + "grad_norm": 2.0219234096557033, + "learning_rate": 3.539601041152696e-07, + "loss": 0.4917, + "step": 18987 + }, + { + "epoch": 0.7835272757283156, + "grad_norm": 3.7687940622351994, + "learning_rate": 3.5383077029032724e-07, + "loss": 0.4489, + "step": 18988 + }, + { + "epoch": 0.7835685400676735, + "grad_norm": 4.835747551138265, + "learning_rate": 3.537014569388346e-07, + "loss": 0.5182, + "step": 18989 + }, + { + "epoch": 0.7836098044070314, + "grad_norm": 3.559144485586415, + "learning_rate": 3.535721640631019e-07, + "loss": 0.5528, + "step": 18990 + }, + { + "epoch": 0.7836510687463893, + "grad_norm": 3.4546531576727397, + "learning_rate": 3.534428916654379e-07, + "loss": 0.5177, + "step": 18991 + }, + { + "epoch": 0.7836923330857473, + "grad_norm": 4.800175467010033, + "learning_rate": 3.53313639748152e-07, + "loss": 0.5148, + "step": 18992 + }, + { + "epoch": 0.7837335974251052, + "grad_norm": 2.861830863502031, + "learning_rate": 3.5318440831355317e-07, + "loss": 0.5214, + "step": 18993 + }, + { + "epoch": 0.7837748617644632, + "grad_norm": 3.465239350248986, + "learning_rate": 3.5305519736395016e-07, + "loss": 0.5251, + 
"step": 18994 + }, + { + "epoch": 0.7838161261038211, + "grad_norm": 2.0802077828871166, + "learning_rate": 3.5292600690165e-07, + "loss": 0.5, + "step": 18995 + }, + { + "epoch": 0.783857390443179, + "grad_norm": 3.8955090125152094, + "learning_rate": 3.5279683692896136e-07, + "loss": 0.4846, + "step": 18996 + }, + { + "epoch": 0.783898654782537, + "grad_norm": 2.5885314794464818, + "learning_rate": 3.526676874481907e-07, + "loss": 0.5086, + "step": 18997 + }, + { + "epoch": 0.7839399191218949, + "grad_norm": 2.8693704221191165, + "learning_rate": 3.5253855846164573e-07, + "loss": 0.4637, + "step": 18998 + }, + { + "epoch": 0.7839811834612528, + "grad_norm": 2.5974223732197905, + "learning_rate": 3.5240944997163205e-07, + "loss": 0.5384, + "step": 18999 + }, + { + "epoch": 0.7840224478006107, + "grad_norm": 2.633293144812116, + "learning_rate": 3.5228036198045664e-07, + "loss": 0.4526, + "step": 19000 + }, + { + "epoch": 0.7840637121399686, + "grad_norm": 2.829380674589011, + "learning_rate": 3.5215129449042506e-07, + "loss": 0.5613, + "step": 19001 + }, + { + "epoch": 0.7841049764793265, + "grad_norm": 1.7938590377201529, + "learning_rate": 3.520222475038432e-07, + "loss": 0.541, + "step": 19002 + }, + { + "epoch": 0.7841462408186844, + "grad_norm": 3.4143615646521654, + "learning_rate": 3.5189322102301547e-07, + "loss": 0.5044, + "step": 19003 + }, + { + "epoch": 0.7841875051580425, + "grad_norm": 3.065523819830576, + "learning_rate": 3.517642150502468e-07, + "loss": 0.4914, + "step": 19004 + }, + { + "epoch": 0.7842287694974004, + "grad_norm": 2.9721184985654125, + "learning_rate": 3.516352295878417e-07, + "loss": 0.4513, + "step": 19005 + }, + { + "epoch": 0.7842700338367583, + "grad_norm": 2.3368282274665497, + "learning_rate": 3.515062646381047e-07, + "loss": 0.4512, + "step": 19006 + }, + { + "epoch": 0.7843112981761162, + "grad_norm": 2.4141909343497914, + "learning_rate": 3.513773202033384e-07, + "loss": 0.4508, + "step": 19007 + }, + { + "epoch": 0.7843525625154741, + "grad_norm": 3.3914930127377088, + "learning_rate": 3.5124839628584665e-07, + "loss": 0.5036, + "step": 19008 + }, + { + "epoch": 0.784393826854832, + "grad_norm": 4.2841309217087495, + "learning_rate": 3.511194928879328e-07, + "loss": 0.5325, + "step": 19009 + }, + { + "epoch": 0.78443509119419, + "grad_norm": 17.33447992493576, + "learning_rate": 3.5099061001189826e-07, + "loss": 0.4119, + "step": 19010 + }, + { + "epoch": 0.7844763555335479, + "grad_norm": 3.184724659446224, + "learning_rate": 3.5086174766004623e-07, + "loss": 0.4568, + "step": 19011 + }, + { + "epoch": 0.7845176198729058, + "grad_norm": 3.241628035212077, + "learning_rate": 3.507329058346778e-07, + "loss": 0.5261, + "step": 19012 + }, + { + "epoch": 0.7845588842122637, + "grad_norm": 3.6114495206598614, + "learning_rate": 3.5060408453809464e-07, + "loss": 0.4977, + "step": 19013 + }, + { + "epoch": 0.7846001485516216, + "grad_norm": 2.2603023843802172, + "learning_rate": 3.504752837725981e-07, + "loss": 0.5177, + "step": 19014 + }, + { + "epoch": 0.7846414128909797, + "grad_norm": 3.428922009653072, + "learning_rate": 3.5034650354048847e-07, + "loss": 0.4929, + "step": 19015 + }, + { + "epoch": 0.7846826772303376, + "grad_norm": 4.540561326317718, + "learning_rate": 3.502177438440663e-07, + "loss": 0.5289, + "step": 19016 + }, + { + "epoch": 0.7847239415696955, + "grad_norm": 3.030075557545561, + "learning_rate": 3.500890046856314e-07, + "loss": 0.4809, + "step": 19017 + }, + { + "epoch": 0.7847652059090534, + "grad_norm": 2.3973928611792488, + 
"learning_rate": 3.4996028606748404e-07, + "loss": 0.541, + "step": 19018 + }, + { + "epoch": 0.7848064702484113, + "grad_norm": 3.4746179830806416, + "learning_rate": 3.498315879919224e-07, + "loss": 0.4789, + "step": 19019 + }, + { + "epoch": 0.7848477345877692, + "grad_norm": 7.852497629151645, + "learning_rate": 3.4970291046124595e-07, + "loss": 0.4789, + "step": 19020 + }, + { + "epoch": 0.7848889989271272, + "grad_norm": 5.808794828892862, + "learning_rate": 3.495742534777531e-07, + "loss": 0.5032, + "step": 19021 + }, + { + "epoch": 0.7849302632664851, + "grad_norm": 2.303657818278071, + "learning_rate": 3.4944561704374256e-07, + "loss": 0.4766, + "step": 19022 + }, + { + "epoch": 0.784971527605843, + "grad_norm": 4.020887396011574, + "learning_rate": 3.4931700116151143e-07, + "loss": 0.505, + "step": 19023 + }, + { + "epoch": 0.7850127919452009, + "grad_norm": 2.269489483817471, + "learning_rate": 3.4918840583335676e-07, + "loss": 0.4805, + "step": 19024 + }, + { + "epoch": 0.7850540562845589, + "grad_norm": 3.028563102827117, + "learning_rate": 3.4905983106157617e-07, + "loss": 0.5585, + "step": 19025 + }, + { + "epoch": 0.7850953206239168, + "grad_norm": 3.8916217848290047, + "learning_rate": 3.4893127684846603e-07, + "loss": 0.5245, + "step": 19026 + }, + { + "epoch": 0.7851365849632748, + "grad_norm": 2.4720389012469117, + "learning_rate": 3.488027431963235e-07, + "loss": 0.5171, + "step": 19027 + }, + { + "epoch": 0.7851778493026327, + "grad_norm": 3.5753890578077097, + "learning_rate": 3.486742301074432e-07, + "loss": 0.5244, + "step": 19028 + }, + { + "epoch": 0.7852191136419906, + "grad_norm": 3.7148673524726807, + "learning_rate": 3.4854573758412136e-07, + "loss": 0.4985, + "step": 19029 + }, + { + "epoch": 0.7852603779813485, + "grad_norm": 3.189117572457154, + "learning_rate": 3.484172656286532e-07, + "loss": 0.4998, + "step": 19030 + }, + { + "epoch": 0.7853016423207064, + "grad_norm": 118.05391003024371, + "learning_rate": 3.4828881424333387e-07, + "loss": 0.4684, + "step": 19031 + }, + { + "epoch": 0.7853429066600643, + "grad_norm": 5.126575640795818, + "learning_rate": 3.481603834304571e-07, + "loss": 0.5589, + "step": 19032 + }, + { + "epoch": 0.7853841709994223, + "grad_norm": 3.077696680490819, + "learning_rate": 3.4803197319231723e-07, + "loss": 0.501, + "step": 19033 + }, + { + "epoch": 0.7854254353387802, + "grad_norm": 2.184617399263876, + "learning_rate": 3.479035835312086e-07, + "loss": 0.5316, + "step": 19034 + }, + { + "epoch": 0.7854666996781382, + "grad_norm": 2.2630210336716896, + "learning_rate": 3.4777521444942405e-07, + "loss": 0.4895, + "step": 19035 + }, + { + "epoch": 0.7855079640174961, + "grad_norm": 4.325205125386714, + "learning_rate": 3.4764686594925625e-07, + "loss": 0.557, + "step": 19036 + }, + { + "epoch": 0.785549228356854, + "grad_norm": 3.224116623816439, + "learning_rate": 3.4751853803299804e-07, + "loss": 0.4969, + "step": 19037 + }, + { + "epoch": 0.785590492696212, + "grad_norm": 3.5008548852921226, + "learning_rate": 3.473902307029419e-07, + "loss": 0.523, + "step": 19038 + }, + { + "epoch": 0.7856317570355699, + "grad_norm": 2.8089136483574313, + "learning_rate": 3.4726194396138e-07, + "loss": 0.4779, + "step": 19039 + }, + { + "epoch": 0.7856730213749278, + "grad_norm": 3.896304392489525, + "learning_rate": 3.471336778106032e-07, + "loss": 0.4639, + "step": 19040 + }, + { + "epoch": 0.7857142857142857, + "grad_norm": 3.004027016593117, + "learning_rate": 3.470054322529028e-07, + "loss": 0.5485, + "step": 19041 + }, + { + 
"epoch": 0.7857555500536436, + "grad_norm": 2.8410474389108393, + "learning_rate": 3.4687720729056976e-07, + "loss": 0.4895, + "step": 19042 + }, + { + "epoch": 0.7857968143930015, + "grad_norm": 4.033845993482116, + "learning_rate": 3.467490029258948e-07, + "loss": 0.5232, + "step": 19043 + }, + { + "epoch": 0.7858380787323594, + "grad_norm": 4.008433727881337, + "learning_rate": 3.466208191611673e-07, + "loss": 0.5122, + "step": 19044 + }, + { + "epoch": 0.7858793430717175, + "grad_norm": 2.097382880754314, + "learning_rate": 3.464926559986774e-07, + "loss": 0.5031, + "step": 19045 + }, + { + "epoch": 0.7859206074110754, + "grad_norm": 2.818859034072483, + "learning_rate": 3.4636451344071405e-07, + "loss": 0.451, + "step": 19046 + }, + { + "epoch": 0.7859618717504333, + "grad_norm": 2.27022006532632, + "learning_rate": 3.4623639148956714e-07, + "loss": 0.4483, + "step": 19047 + }, + { + "epoch": 0.7860031360897912, + "grad_norm": 2.2006931651940516, + "learning_rate": 3.461082901475243e-07, + "loss": 0.5148, + "step": 19048 + }, + { + "epoch": 0.7860444004291491, + "grad_norm": 30.601965568937803, + "learning_rate": 3.4598020941687377e-07, + "loss": 0.5539, + "step": 19049 + }, + { + "epoch": 0.786085664768507, + "grad_norm": 7.329225014799797, + "learning_rate": 3.458521492999036e-07, + "loss": 0.4554, + "step": 19050 + }, + { + "epoch": 0.786126929107865, + "grad_norm": 4.2309047432129105, + "learning_rate": 3.4572410979890123e-07, + "loss": 0.5109, + "step": 19051 + }, + { + "epoch": 0.7861681934472229, + "grad_norm": 2.323461202736227, + "learning_rate": 3.4559609091615415e-07, + "loss": 0.5446, + "step": 19052 + }, + { + "epoch": 0.7862094577865808, + "grad_norm": 2.8104872891691666, + "learning_rate": 3.4546809265394845e-07, + "loss": 0.4758, + "step": 19053 + }, + { + "epoch": 0.7862507221259387, + "grad_norm": 1.9853397694915325, + "learning_rate": 3.4534011501457075e-07, + "loss": 0.4811, + "step": 19054 + }, + { + "epoch": 0.7862919864652967, + "grad_norm": 2.5348006876447764, + "learning_rate": 3.4521215800030747e-07, + "loss": 0.4967, + "step": 19055 + }, + { + "epoch": 0.7863332508046547, + "grad_norm": 50.74463858330511, + "learning_rate": 3.450842216134437e-07, + "loss": 0.4682, + "step": 19056 + }, + { + "epoch": 0.7863745151440126, + "grad_norm": 3.3182359338994876, + "learning_rate": 3.4495630585626463e-07, + "loss": 0.4941, + "step": 19057 + }, + { + "epoch": 0.7864157794833705, + "grad_norm": 5.27530353809505, + "learning_rate": 3.448284107310556e-07, + "loss": 0.5518, + "step": 19058 + }, + { + "epoch": 0.7864570438227284, + "grad_norm": 2.7827192055337937, + "learning_rate": 3.4470053624010133e-07, + "loss": 0.493, + "step": 19059 + }, + { + "epoch": 0.7864983081620863, + "grad_norm": 3.0765240276905885, + "learning_rate": 3.445726823856851e-07, + "loss": 0.4757, + "step": 19060 + }, + { + "epoch": 0.7865395725014442, + "grad_norm": 2.3461259126051517, + "learning_rate": 3.444448491700916e-07, + "loss": 0.4559, + "step": 19061 + }, + { + "epoch": 0.7865808368408022, + "grad_norm": 3.7075876543957693, + "learning_rate": 3.443170365956035e-07, + "loss": 0.5361, + "step": 19062 + }, + { + "epoch": 0.7866221011801601, + "grad_norm": 3.006430736503186, + "learning_rate": 3.441892446645042e-07, + "loss": 0.5541, + "step": 19063 + }, + { + "epoch": 0.786663365519518, + "grad_norm": 2.8624233211592056, + "learning_rate": 3.440614733790769e-07, + "loss": 0.5186, + "step": 19064 + }, + { + "epoch": 0.786704629858876, + "grad_norm": 2.3360912786125576, + "learning_rate": 
3.439337227416028e-07, + "loss": 0.5039, + "step": 19065 + }, + { + "epoch": 0.7867458941982339, + "grad_norm": 3.0924520109069986, + "learning_rate": 3.4380599275436455e-07, + "loss": 0.4697, + "step": 19066 + }, + { + "epoch": 0.7867871585375918, + "grad_norm": 5.105742144151861, + "learning_rate": 3.436782834196436e-07, + "loss": 0.4801, + "step": 19067 + }, + { + "epoch": 0.7868284228769498, + "grad_norm": 1.9302664258615088, + "learning_rate": 3.435505947397216e-07, + "loss": 0.5275, + "step": 19068 + }, + { + "epoch": 0.7868696872163077, + "grad_norm": 2.912498031270953, + "learning_rate": 3.434229267168786e-07, + "loss": 0.5349, + "step": 19069 + }, + { + "epoch": 0.7869109515556656, + "grad_norm": 2.282501717783205, + "learning_rate": 3.4329527935339536e-07, + "loss": 0.4405, + "step": 19070 + }, + { + "epoch": 0.7869522158950235, + "grad_norm": 7.703681656276182, + "learning_rate": 3.431676526515522e-07, + "loss": 0.502, + "step": 19071 + }, + { + "epoch": 0.7869934802343814, + "grad_norm": 2.2401734847520203, + "learning_rate": 3.4304004661362893e-07, + "loss": 0.5032, + "step": 19072 + }, + { + "epoch": 0.7870347445737393, + "grad_norm": 2.652716254631676, + "learning_rate": 3.429124612419044e-07, + "loss": 0.4713, + "step": 19073 + }, + { + "epoch": 0.7870760089130973, + "grad_norm": 8.560467148456537, + "learning_rate": 3.427848965386583e-07, + "loss": 0.547, + "step": 19074 + }, + { + "epoch": 0.7871172732524552, + "grad_norm": 5.764338793552743, + "learning_rate": 3.4265735250616847e-07, + "loss": 0.6102, + "step": 19075 + }, + { + "epoch": 0.7871585375918132, + "grad_norm": 2.431034518709802, + "learning_rate": 3.4252982914671394e-07, + "loss": 0.5125, + "step": 19076 + }, + { + "epoch": 0.7871998019311711, + "grad_norm": 3.3675610330025525, + "learning_rate": 3.4240232646257186e-07, + "loss": 0.5006, + "step": 19077 + }, + { + "epoch": 0.787241066270529, + "grad_norm": 2.248171615439306, + "learning_rate": 3.4227484445601994e-07, + "loss": 0.4942, + "step": 19078 + }, + { + "epoch": 0.787282330609887, + "grad_norm": 8.682770124974262, + "learning_rate": 3.4214738312933564e-07, + "loss": 0.4678, + "step": 19079 + }, + { + "epoch": 0.7873235949492449, + "grad_norm": 3.246735116076866, + "learning_rate": 3.4201994248479586e-07, + "loss": 0.509, + "step": 19080 + }, + { + "epoch": 0.7873648592886028, + "grad_norm": 33.332229887735174, + "learning_rate": 3.418925225246764e-07, + "loss": 0.4848, + "step": 19081 + }, + { + "epoch": 0.7874061236279607, + "grad_norm": 3.1259285771927448, + "learning_rate": 3.4176512325125365e-07, + "loss": 0.5767, + "step": 19082 + }, + { + "epoch": 0.7874473879673186, + "grad_norm": 3.1974508367776124, + "learning_rate": 3.4163774466680324e-07, + "loss": 0.5133, + "step": 19083 + }, + { + "epoch": 0.7874886523066765, + "grad_norm": 2.7665751586106677, + "learning_rate": 3.415103867736009e-07, + "loss": 0.4403, + "step": 19084 + }, + { + "epoch": 0.7875299166460344, + "grad_norm": 3.590769743504242, + "learning_rate": 3.413830495739206e-07, + "loss": 0.5471, + "step": 19085 + }, + { + "epoch": 0.7875711809853925, + "grad_norm": 2.411201621835968, + "learning_rate": 3.412557330700376e-07, + "loss": 0.4921, + "step": 19086 + }, + { + "epoch": 0.7876124453247504, + "grad_norm": 2.927472620625037, + "learning_rate": 3.41128437264226e-07, + "loss": 0.4585, + "step": 19087 + }, + { + "epoch": 0.7876537096641083, + "grad_norm": 4.895992474069157, + "learning_rate": 3.4100116215875984e-07, + "loss": 0.5849, + "step": 19088 + }, + { + "epoch": 
0.7876949740034662, + "grad_norm": 3.56714408568256, + "learning_rate": 3.408739077559124e-07, + "loss": 0.5018, + "step": 19089 + }, + { + "epoch": 0.7877362383428241, + "grad_norm": 8.53194782680317, + "learning_rate": 3.407466740579562e-07, + "loss": 0.5274, + "step": 19090 + }, + { + "epoch": 0.787777502682182, + "grad_norm": 3.5490011342175314, + "learning_rate": 3.406194610671644e-07, + "loss": 0.5307, + "step": 19091 + }, + { + "epoch": 0.78781876702154, + "grad_norm": 4.854446495755353, + "learning_rate": 3.4049226878580964e-07, + "loss": 0.5222, + "step": 19092 + }, + { + "epoch": 0.7878600313608979, + "grad_norm": 3.491640670224496, + "learning_rate": 3.4036509721616336e-07, + "loss": 0.556, + "step": 19093 + }, + { + "epoch": 0.7879012957002558, + "grad_norm": 2.265182217108977, + "learning_rate": 3.402379463604973e-07, + "loss": 0.562, + "step": 19094 + }, + { + "epoch": 0.7879425600396137, + "grad_norm": 3.065059213394057, + "learning_rate": 3.40110816221083e-07, + "loss": 0.4852, + "step": 19095 + }, + { + "epoch": 0.7879838243789717, + "grad_norm": 9.537870296647121, + "learning_rate": 3.3998370680019134e-07, + "loss": 0.5186, + "step": 19096 + }, + { + "epoch": 0.7880250887183297, + "grad_norm": 4.024073360223738, + "learning_rate": 3.398566181000922e-07, + "loss": 0.5372, + "step": 19097 + }, + { + "epoch": 0.7880663530576876, + "grad_norm": 2.1550477108442996, + "learning_rate": 3.3972955012305604e-07, + "loss": 0.518, + "step": 19098 + }, + { + "epoch": 0.7881076173970455, + "grad_norm": 12.15317253299096, + "learning_rate": 3.3960250287135277e-07, + "loss": 0.5485, + "step": 19099 + }, + { + "epoch": 0.7881488817364034, + "grad_norm": 3.1352267051973266, + "learning_rate": 3.394754763472521e-07, + "loss": 0.4649, + "step": 19100 + }, + { + "epoch": 0.7881901460757613, + "grad_norm": 2.5921905711327176, + "learning_rate": 3.3934847055302256e-07, + "loss": 0.5402, + "step": 19101 + }, + { + "epoch": 0.7882314104151192, + "grad_norm": 2.9190336457812815, + "learning_rate": 3.392214854909325e-07, + "loss": 0.5088, + "step": 19102 + }, + { + "epoch": 0.7882726747544772, + "grad_norm": 2.882293727232147, + "learning_rate": 3.390945211632505e-07, + "loss": 0.4866, + "step": 19103 + }, + { + "epoch": 0.7883139390938351, + "grad_norm": 2.5193621132075554, + "learning_rate": 3.389675775722444e-07, + "loss": 0.5326, + "step": 19104 + }, + { + "epoch": 0.788355203433193, + "grad_norm": 6.459479645143102, + "learning_rate": 3.388406547201823e-07, + "loss": 0.6035, + "step": 19105 + }, + { + "epoch": 0.788396467772551, + "grad_norm": 4.576725505151126, + "learning_rate": 3.387137526093305e-07, + "loss": 0.4965, + "step": 19106 + }, + { + "epoch": 0.7884377321119089, + "grad_norm": 2.8182810153805615, + "learning_rate": 3.385868712419561e-07, + "loss": 0.4569, + "step": 19107 + }, + { + "epoch": 0.7884789964512668, + "grad_norm": 4.2659730927471475, + "learning_rate": 3.384600106203257e-07, + "loss": 0.4801, + "step": 19108 + }, + { + "epoch": 0.7885202607906248, + "grad_norm": 3.2505899363680353, + "learning_rate": 3.383331707467056e-07, + "loss": 0.5224, + "step": 19109 + }, + { + "epoch": 0.7885615251299827, + "grad_norm": 3.5770662350523406, + "learning_rate": 3.3820635162336067e-07, + "loss": 0.5015, + "step": 19110 + }, + { + "epoch": 0.7886027894693406, + "grad_norm": 5.471969421014651, + "learning_rate": 3.3807955325255657e-07, + "loss": 0.5005, + "step": 19111 + }, + { + "epoch": 0.7886440538086985, + "grad_norm": 3.2124147975425332, + "learning_rate": 3.3795277563655885e-07, 
+ "loss": 0.5176, + "step": 19112 + }, + { + "epoch": 0.7886853181480564, + "grad_norm": 2.17585855518798, + "learning_rate": 3.378260187776312e-07, + "loss": 0.4864, + "step": 19113 + }, + { + "epoch": 0.7887265824874143, + "grad_norm": 2.2625089249298984, + "learning_rate": 3.376992826780386e-07, + "loss": 0.4964, + "step": 19114 + }, + { + "epoch": 0.7887678468267723, + "grad_norm": 2.2579316784943693, + "learning_rate": 3.3757256734004383e-07, + "loss": 0.4987, + "step": 19115 + }, + { + "epoch": 0.7888091111661303, + "grad_norm": 2.8962423018396386, + "learning_rate": 3.374458727659112e-07, + "loss": 0.5085, + "step": 19116 + }, + { + "epoch": 0.7888503755054882, + "grad_norm": 5.704095052232106, + "learning_rate": 3.373191989579038e-07, + "loss": 0.5369, + "step": 19117 + }, + { + "epoch": 0.7888916398448461, + "grad_norm": 3.5852346235101753, + "learning_rate": 3.3719254591828365e-07, + "loss": 0.5374, + "step": 19118 + }, + { + "epoch": 0.788932904184204, + "grad_norm": 2.6866882823071783, + "learning_rate": 3.370659136493137e-07, + "loss": 0.4882, + "step": 19119 + }, + { + "epoch": 0.788974168523562, + "grad_norm": 4.166705143305361, + "learning_rate": 3.369393021532556e-07, + "loss": 0.4861, + "step": 19120 + }, + { + "epoch": 0.7890154328629199, + "grad_norm": 3.42322188542202, + "learning_rate": 3.3681271143237156e-07, + "loss": 0.4924, + "step": 19121 + }, + { + "epoch": 0.7890566972022778, + "grad_norm": 3.9673727806237977, + "learning_rate": 3.3668614148892183e-07, + "loss": 0.4813, + "step": 19122 + }, + { + "epoch": 0.7890979615416357, + "grad_norm": 3.2235477530580696, + "learning_rate": 3.365595923251679e-07, + "loss": 0.4995, + "step": 19123 + }, + { + "epoch": 0.7891392258809936, + "grad_norm": 4.4901404540123675, + "learning_rate": 3.3643306394337013e-07, + "loss": 0.4812, + "step": 19124 + }, + { + "epoch": 0.7891804902203515, + "grad_norm": 2.3180753282056212, + "learning_rate": 3.3630655634578905e-07, + "loss": 0.4708, + "step": 19125 + }, + { + "epoch": 0.7892217545597096, + "grad_norm": 2.941061997565453, + "learning_rate": 3.361800695346835e-07, + "loss": 0.5259, + "step": 19126 + }, + { + "epoch": 0.7892630188990675, + "grad_norm": 3.980153207725948, + "learning_rate": 3.3605360351231384e-07, + "loss": 0.5632, + "step": 19127 + }, + { + "epoch": 0.7893042832384254, + "grad_norm": 2.9635131347275774, + "learning_rate": 3.359271582809381e-07, + "loss": 0.5488, + "step": 19128 + }, + { + "epoch": 0.7893455475777833, + "grad_norm": 17.919932673227876, + "learning_rate": 3.3580073384281535e-07, + "loss": 0.4877, + "step": 19129 + }, + { + "epoch": 0.7893868119171412, + "grad_norm": 3.259870312849332, + "learning_rate": 3.3567433020020436e-07, + "loss": 0.5122, + "step": 19130 + }, + { + "epoch": 0.7894280762564991, + "grad_norm": 4.964582546208078, + "learning_rate": 3.355479473553621e-07, + "loss": 0.5769, + "step": 19131 + }, + { + "epoch": 0.789469340595857, + "grad_norm": 3.5610829539926403, + "learning_rate": 3.354215853105465e-07, + "loss": 0.5322, + "step": 19132 + }, + { + "epoch": 0.789510604935215, + "grad_norm": 10.624544348776306, + "learning_rate": 3.3529524406801515e-07, + "loss": 0.5755, + "step": 19133 + }, + { + "epoch": 0.7895518692745729, + "grad_norm": 3.7501966761778385, + "learning_rate": 3.3516892363002386e-07, + "loss": 0.4831, + "step": 19134 + }, + { + "epoch": 0.7895931336139308, + "grad_norm": 7.282273512628313, + "learning_rate": 3.350426239988297e-07, + "loss": 0.5491, + "step": 19135 + }, + { + "epoch": 0.7896343979532887, + 
"grad_norm": 2.0227296570689086, + "learning_rate": 3.349163451766884e-07, + "loss": 0.5387, + "step": 19136 + }, + { + "epoch": 0.7896756622926467, + "grad_norm": 8.028443721231811, + "learning_rate": 3.347900871658564e-07, + "loss": 0.5165, + "step": 19137 + }, + { + "epoch": 0.7897169266320047, + "grad_norm": 2.3132482475482807, + "learning_rate": 3.346638499685878e-07, + "loss": 0.5169, + "step": 19138 + }, + { + "epoch": 0.7897581909713626, + "grad_norm": 7.706563766078722, + "learning_rate": 3.3453763358713806e-07, + "loss": 0.4775, + "step": 19139 + }, + { + "epoch": 0.7897994553107205, + "grad_norm": 2.936840358932265, + "learning_rate": 3.344114380237621e-07, + "loss": 0.5115, + "step": 19140 + }, + { + "epoch": 0.7898407196500784, + "grad_norm": 3.2550179632029512, + "learning_rate": 3.342852632807135e-07, + "loss": 0.5518, + "step": 19141 + }, + { + "epoch": 0.7898819839894363, + "grad_norm": 6.266488576778696, + "learning_rate": 3.3415910936024647e-07, + "loss": 0.5156, + "step": 19142 + }, + { + "epoch": 0.7899232483287942, + "grad_norm": 3.7185910233112884, + "learning_rate": 3.340329762646138e-07, + "loss": 0.4983, + "step": 19143 + }, + { + "epoch": 0.7899645126681522, + "grad_norm": 3.4731747070849965, + "learning_rate": 3.339068639960691e-07, + "loss": 0.4888, + "step": 19144 + }, + { + "epoch": 0.7900057770075101, + "grad_norm": 7.680810829830635, + "learning_rate": 3.337807725568649e-07, + "loss": 0.5604, + "step": 19145 + }, + { + "epoch": 0.790047041346868, + "grad_norm": 2.6161881155530735, + "learning_rate": 3.3365470194925403e-07, + "loss": 0.4683, + "step": 19146 + }, + { + "epoch": 0.790088305686226, + "grad_norm": 10.946405390915404, + "learning_rate": 3.3352865217548747e-07, + "loss": 0.5166, + "step": 19147 + }, + { + "epoch": 0.7901295700255839, + "grad_norm": 3.700265823254626, + "learning_rate": 3.3340262323781704e-07, + "loss": 0.518, + "step": 19148 + }, + { + "epoch": 0.7901708343649418, + "grad_norm": 4.569126371878695, + "learning_rate": 3.3327661513849437e-07, + "loss": 0.4896, + "step": 19149 + }, + { + "epoch": 0.7902120987042998, + "grad_norm": 4.769263001186181, + "learning_rate": 3.3315062787977044e-07, + "loss": 0.5063, + "step": 19150 + }, + { + "epoch": 0.7902533630436577, + "grad_norm": 2.258577525783819, + "learning_rate": 3.330246614638948e-07, + "loss": 0.4835, + "step": 19151 + }, + { + "epoch": 0.7902946273830156, + "grad_norm": 2.418965961630519, + "learning_rate": 3.328987158931182e-07, + "loss": 0.5088, + "step": 19152 + }, + { + "epoch": 0.7903358917223735, + "grad_norm": 2.527724728689649, + "learning_rate": 3.3277279116969034e-07, + "loss": 0.4491, + "step": 19153 + }, + { + "epoch": 0.7903771560617314, + "grad_norm": 2.0173012408551068, + "learning_rate": 3.3264688729586055e-07, + "loss": 0.444, + "step": 19154 + }, + { + "epoch": 0.7904184204010893, + "grad_norm": 3.4676714957030326, + "learning_rate": 3.325210042738772e-07, + "loss": 0.4808, + "step": 19155 + }, + { + "epoch": 0.7904596847404473, + "grad_norm": 10.014057657670675, + "learning_rate": 3.3239514210598933e-07, + "loss": 0.478, + "step": 19156 + }, + { + "epoch": 0.7905009490798053, + "grad_norm": 6.782289801626726, + "learning_rate": 3.3226930079444506e-07, + "loss": 0.533, + "step": 19157 + }, + { + "epoch": 0.7905422134191632, + "grad_norm": 4.020688597305227, + "learning_rate": 3.3214348034149264e-07, + "loss": 0.4753, + "step": 19158 + }, + { + "epoch": 0.7905834777585211, + "grad_norm": 2.7444195532239504, + "learning_rate": 3.320176807493788e-07, + "loss": 
0.5164, + "step": 19159 + }, + { + "epoch": 0.790624742097879, + "grad_norm": 4.134608915342911, + "learning_rate": 3.318919020203511e-07, + "loss": 0.4335, + "step": 19160 + }, + { + "epoch": 0.790666006437237, + "grad_norm": 5.638571176204643, + "learning_rate": 3.317661441566563e-07, + "loss": 0.5038, + "step": 19161 + }, + { + "epoch": 0.7907072707765949, + "grad_norm": 3.88372269140181, + "learning_rate": 3.316404071605408e-07, + "loss": 0.52, + "step": 19162 + }, + { + "epoch": 0.7907485351159528, + "grad_norm": 3.5633935313242797, + "learning_rate": 3.315146910342501e-07, + "loss": 0.4666, + "step": 19163 + }, + { + "epoch": 0.7907897994553107, + "grad_norm": 5.251759342006074, + "learning_rate": 3.313889957800302e-07, + "loss": 0.5383, + "step": 19164 + }, + { + "epoch": 0.7908310637946686, + "grad_norm": 3.1745167915913233, + "learning_rate": 3.312633214001262e-07, + "loss": 0.5363, + "step": 19165 + }, + { + "epoch": 0.7908723281340265, + "grad_norm": 3.0676736877805255, + "learning_rate": 3.311376678967834e-07, + "loss": 0.5214, + "step": 19166 + }, + { + "epoch": 0.7909135924733846, + "grad_norm": 2.2117536787363976, + "learning_rate": 3.31012035272246e-07, + "loss": 0.4455, + "step": 19167 + }, + { + "epoch": 0.7909548568127425, + "grad_norm": 4.337453764740525, + "learning_rate": 3.3088642352875747e-07, + "loss": 0.5212, + "step": 19168 + }, + { + "epoch": 0.7909961211521004, + "grad_norm": 3.4535622595713433, + "learning_rate": 3.30760832668562e-07, + "loss": 0.5272, + "step": 19169 + }, + { + "epoch": 0.7910373854914583, + "grad_norm": 4.360173448846017, + "learning_rate": 3.3063526269390376e-07, + "loss": 0.4981, + "step": 19170 + }, + { + "epoch": 0.7910786498308162, + "grad_norm": 2.9062216276188604, + "learning_rate": 3.305097136070243e-07, + "loss": 0.5463, + "step": 19171 + }, + { + "epoch": 0.7911199141701741, + "grad_norm": 3.8928909303704216, + "learning_rate": 3.3038418541016725e-07, + "loss": 0.5031, + "step": 19172 + }, + { + "epoch": 0.791161178509532, + "grad_norm": 2.487476496529219, + "learning_rate": 3.302586781055744e-07, + "loss": 0.5397, + "step": 19173 + }, + { + "epoch": 0.79120244284889, + "grad_norm": 3.4682493072726515, + "learning_rate": 3.301331916954882e-07, + "loss": 0.5108, + "step": 19174 + }, + { + "epoch": 0.7912437071882479, + "grad_norm": 7.144992027723771, + "learning_rate": 3.3000772618214955e-07, + "loss": 0.4707, + "step": 19175 + }, + { + "epoch": 0.7912849715276058, + "grad_norm": 2.8544507646819306, + "learning_rate": 3.2988228156779954e-07, + "loss": 0.5367, + "step": 19176 + }, + { + "epoch": 0.7913262358669638, + "grad_norm": 3.0005295657617364, + "learning_rate": 3.297568578546793e-07, + "loss": 0.5016, + "step": 19177 + }, + { + "epoch": 0.7913675002063217, + "grad_norm": 1.8521235430286092, + "learning_rate": 3.296314550450295e-07, + "loss": 0.5301, + "step": 19178 + }, + { + "epoch": 0.7914087645456797, + "grad_norm": 3.041483561014364, + "learning_rate": 3.295060731410892e-07, + "loss": 0.5576, + "step": 19179 + }, + { + "epoch": 0.7914500288850376, + "grad_norm": 5.331764635415571, + "learning_rate": 3.293807121450991e-07, + "loss": 0.5077, + "step": 19180 + }, + { + "epoch": 0.7914912932243955, + "grad_norm": 3.4752750306785747, + "learning_rate": 3.2925537205929746e-07, + "loss": 0.5119, + "step": 19181 + }, + { + "epoch": 0.7915325575637534, + "grad_norm": 6.929427421398422, + "learning_rate": 3.291300528859236e-07, + "loss": 0.5311, + "step": 19182 + }, + { + "epoch": 0.7915738219031113, + "grad_norm": 
3.023398976911922, + "learning_rate": 3.290047546272165e-07, + "loss": 0.4795, + "step": 19183 + }, + { + "epoch": 0.7916150862424692, + "grad_norm": 3.323828913420191, + "learning_rate": 3.288794772854135e-07, + "loss": 0.4756, + "step": 19184 + }, + { + "epoch": 0.7916563505818272, + "grad_norm": 7.347130279208687, + "learning_rate": 3.287542208627526e-07, + "loss": 0.5109, + "step": 19185 + }, + { + "epoch": 0.7916976149211851, + "grad_norm": 13.501417697439598, + "learning_rate": 3.2862898536147144e-07, + "loss": 0.4929, + "step": 19186 + }, + { + "epoch": 0.7917388792605431, + "grad_norm": 8.93839369785737, + "learning_rate": 3.285037707838073e-07, + "loss": 0.524, + "step": 19187 + }, + { + "epoch": 0.791780143599901, + "grad_norm": 2.7871357057625468, + "learning_rate": 3.283785771319961e-07, + "loss": 0.5022, + "step": 19188 + }, + { + "epoch": 0.7918214079392589, + "grad_norm": 4.245726879010856, + "learning_rate": 3.282534044082743e-07, + "loss": 0.516, + "step": 19189 + }, + { + "epoch": 0.7918626722786168, + "grad_norm": 3.1327643042866287, + "learning_rate": 3.2812825261487845e-07, + "loss": 0.498, + "step": 19190 + }, + { + "epoch": 0.7919039366179748, + "grad_norm": 7.970128391392498, + "learning_rate": 3.280031217540433e-07, + "loss": 0.5309, + "step": 19191 + }, + { + "epoch": 0.7919452009573327, + "grad_norm": 2.064333126612681, + "learning_rate": 3.2787801182800417e-07, + "loss": 0.4971, + "step": 19192 + }, + { + "epoch": 0.7919864652966906, + "grad_norm": 2.3190687122595217, + "learning_rate": 3.2775292283899637e-07, + "loss": 0.5255, + "step": 19193 + }, + { + "epoch": 0.7920277296360485, + "grad_norm": 2.171231805914268, + "learning_rate": 3.276278547892534e-07, + "loss": 0.4884, + "step": 19194 + }, + { + "epoch": 0.7920689939754064, + "grad_norm": 4.577195213924266, + "learning_rate": 3.2750280768101026e-07, + "loss": 0.5322, + "step": 19195 + }, + { + "epoch": 0.7921102583147643, + "grad_norm": 3.1845954529834106, + "learning_rate": 3.2737778151649974e-07, + "loss": 0.5697, + "step": 19196 + }, + { + "epoch": 0.7921515226541223, + "grad_norm": 9.01560130283747, + "learning_rate": 3.272527762979553e-07, + "loss": 0.5175, + "step": 19197 + }, + { + "epoch": 0.7921927869934803, + "grad_norm": 2.8326714685203647, + "learning_rate": 3.2712779202761023e-07, + "loss": 0.5218, + "step": 19198 + }, + { + "epoch": 0.7922340513328382, + "grad_norm": 6.154553151832734, + "learning_rate": 3.2700282870769724e-07, + "loss": 0.5297, + "step": 19199 + }, + { + "epoch": 0.7922753156721961, + "grad_norm": 14.521787032550394, + "learning_rate": 3.268778863404478e-07, + "loss": 0.4716, + "step": 19200 + }, + { + "epoch": 0.792316580011554, + "grad_norm": 5.664437670158745, + "learning_rate": 3.267529649280939e-07, + "loss": 0.5082, + "step": 19201 + }, + { + "epoch": 0.7923578443509119, + "grad_norm": 2.9837156443664674, + "learning_rate": 3.2662806447286697e-07, + "loss": 0.5347, + "step": 19202 + }, + { + "epoch": 0.7923991086902699, + "grad_norm": 8.418769752444485, + "learning_rate": 3.2650318497699864e-07, + "loss": 0.5119, + "step": 19203 + }, + { + "epoch": 0.7924403730296278, + "grad_norm": 2.6704701657153715, + "learning_rate": 3.2637832644271866e-07, + "loss": 0.5001, + "step": 19204 + }, + { + "epoch": 0.7924816373689857, + "grad_norm": 4.039465405457415, + "learning_rate": 3.262534888722578e-07, + "loss": 0.4861, + "step": 19205 + }, + { + "epoch": 0.7925229017083436, + "grad_norm": 2.2034057341936473, + "learning_rate": 3.261286722678462e-07, + "loss": 0.5463, + "step": 
19206 + }, + { + "epoch": 0.7925641660477015, + "grad_norm": 5.2642766516801585, + "learning_rate": 3.2600387663171274e-07, + "loss": 0.4743, + "step": 19207 + }, + { + "epoch": 0.7926054303870596, + "grad_norm": 3.6518130488466722, + "learning_rate": 3.258791019660872e-07, + "loss": 0.5367, + "step": 19208 + }, + { + "epoch": 0.7926466947264175, + "grad_norm": 2.9435426029483978, + "learning_rate": 3.2575434827319794e-07, + "loss": 0.4557, + "step": 19209 + }, + { + "epoch": 0.7926879590657754, + "grad_norm": 8.576524338618402, + "learning_rate": 3.2562961555527326e-07, + "loss": 0.5476, + "step": 19210 + }, + { + "epoch": 0.7927292234051333, + "grad_norm": 3.8837755873338593, + "learning_rate": 3.255049038145421e-07, + "loss": 0.4791, + "step": 19211 + }, + { + "epoch": 0.7927704877444912, + "grad_norm": 3.457071169690916, + "learning_rate": 3.253802130532309e-07, + "loss": 0.5198, + "step": 19212 + }, + { + "epoch": 0.7928117520838491, + "grad_norm": 3.5851815057480287, + "learning_rate": 3.252555432735677e-07, + "loss": 0.5296, + "step": 19213 + }, + { + "epoch": 0.792853016423207, + "grad_norm": 2.3498223993083758, + "learning_rate": 3.251308944777792e-07, + "loss": 0.507, + "step": 19214 + }, + { + "epoch": 0.792894280762565, + "grad_norm": 2.9061585084216426, + "learning_rate": 3.2500626666809245e-07, + "loss": 0.5563, + "step": 19215 + }, + { + "epoch": 0.7929355451019229, + "grad_norm": 2.7654747763974736, + "learning_rate": 3.2488165984673264e-07, + "loss": 0.5302, + "step": 19216 + }, + { + "epoch": 0.7929768094412808, + "grad_norm": 3.5549892585495177, + "learning_rate": 3.2475707401592626e-07, + "loss": 0.5217, + "step": 19217 + }, + { + "epoch": 0.7930180737806388, + "grad_norm": 2.5660228127304814, + "learning_rate": 3.2463250917789837e-07, + "loss": 0.4998, + "step": 19218 + }, + { + "epoch": 0.7930593381199967, + "grad_norm": 2.517056527938359, + "learning_rate": 3.245079653348746e-07, + "loss": 0.4527, + "step": 19219 + }, + { + "epoch": 0.7931006024593547, + "grad_norm": 3.029184953055577, + "learning_rate": 3.2438344248907925e-07, + "loss": 0.5219, + "step": 19220 + }, + { + "epoch": 0.7931418667987126, + "grad_norm": 2.391220253017393, + "learning_rate": 3.2425894064273626e-07, + "loss": 0.4629, + "step": 19221 + }, + { + "epoch": 0.7931831311380705, + "grad_norm": 3.5261683274724347, + "learning_rate": 3.241344597980697e-07, + "loss": 0.4893, + "step": 19222 + }, + { + "epoch": 0.7932243954774284, + "grad_norm": 2.5050542085683123, + "learning_rate": 3.2400999995730335e-07, + "loss": 0.4999, + "step": 19223 + }, + { + "epoch": 0.7932656598167863, + "grad_norm": 3.9751891323175736, + "learning_rate": 3.2388556112266055e-07, + "loss": 0.5235, + "step": 19224 + }, + { + "epoch": 0.7933069241561442, + "grad_norm": 2.3748489253520186, + "learning_rate": 3.237611432963635e-07, + "loss": 0.5008, + "step": 19225 + }, + { + "epoch": 0.7933481884955021, + "grad_norm": 3.5123771353182107, + "learning_rate": 3.236367464806348e-07, + "loss": 0.4899, + "step": 19226 + }, + { + "epoch": 0.7933894528348601, + "grad_norm": 7.727436107516858, + "learning_rate": 3.235123706776969e-07, + "loss": 0.4715, + "step": 19227 + }, + { + "epoch": 0.7934307171742181, + "grad_norm": 2.395212847701249, + "learning_rate": 3.2338801588977136e-07, + "loss": 0.4479, + "step": 19228 + }, + { + "epoch": 0.793471981513576, + "grad_norm": 2.123460967767936, + "learning_rate": 3.232636821190789e-07, + "loss": 0.4674, + "step": 19229 + }, + { + "epoch": 0.7935132458529339, + "grad_norm": 3.924798551240068, + 
"learning_rate": 3.23139369367841e-07, + "loss": 0.4837, + "step": 19230 + }, + { + "epoch": 0.7935545101922918, + "grad_norm": 8.148065157920321, + "learning_rate": 3.2301507763827816e-07, + "loss": 0.4795, + "step": 19231 + }, + { + "epoch": 0.7935957745316498, + "grad_norm": 2.497058846492332, + "learning_rate": 3.228908069326106e-07, + "loss": 0.4848, + "step": 19232 + }, + { + "epoch": 0.7936370388710077, + "grad_norm": 2.507292633133091, + "learning_rate": 3.2276655725305737e-07, + "loss": 0.4763, + "step": 19233 + }, + { + "epoch": 0.7936783032103656, + "grad_norm": 3.9086036452726716, + "learning_rate": 3.2264232860183857e-07, + "loss": 0.5548, + "step": 19234 + }, + { + "epoch": 0.7937195675497235, + "grad_norm": 3.489857023481666, + "learning_rate": 3.225181209811729e-07, + "loss": 0.543, + "step": 19235 + }, + { + "epoch": 0.7937608318890814, + "grad_norm": 3.9389229659974108, + "learning_rate": 3.223939343932797e-07, + "loss": 0.544, + "step": 19236 + }, + { + "epoch": 0.7938020962284393, + "grad_norm": 8.483198670840029, + "learning_rate": 3.222697688403763e-07, + "loss": 0.5516, + "step": 19237 + }, + { + "epoch": 0.7938433605677974, + "grad_norm": 3.349561375738006, + "learning_rate": 3.221456243246811e-07, + "loss": 0.5509, + "step": 19238 + }, + { + "epoch": 0.7938846249071553, + "grad_norm": 1.8333985949965128, + "learning_rate": 3.2202150084841156e-07, + "loss": 0.4858, + "step": 19239 + }, + { + "epoch": 0.7939258892465132, + "grad_norm": 11.943343656706771, + "learning_rate": 3.2189739841378526e-07, + "loss": 0.6049, + "step": 19240 + }, + { + "epoch": 0.7939671535858711, + "grad_norm": 3.826657832154807, + "learning_rate": 3.2177331702301824e-07, + "loss": 0.5531, + "step": 19241 + }, + { + "epoch": 0.794008417925229, + "grad_norm": 5.878536368335581, + "learning_rate": 3.216492566783272e-07, + "loss": 0.5517, + "step": 19242 + }, + { + "epoch": 0.7940496822645869, + "grad_norm": 4.256944357351034, + "learning_rate": 3.2152521738192817e-07, + "loss": 0.5436, + "step": 19243 + }, + { + "epoch": 0.7940909466039449, + "grad_norm": 2.93572270708565, + "learning_rate": 3.2140119913603737e-07, + "loss": 0.5285, + "step": 19244 + }, + { + "epoch": 0.7941322109433028, + "grad_norm": 6.600064150558211, + "learning_rate": 3.2127720194286963e-07, + "loss": 0.5434, + "step": 19245 + }, + { + "epoch": 0.7941734752826607, + "grad_norm": 3.3734723822069106, + "learning_rate": 3.211532258046392e-07, + "loss": 0.5422, + "step": 19246 + }, + { + "epoch": 0.7942147396220186, + "grad_norm": 2.6460760914697268, + "learning_rate": 3.2102927072356123e-07, + "loss": 0.5003, + "step": 19247 + }, + { + "epoch": 0.7942560039613766, + "grad_norm": 6.177919315660422, + "learning_rate": 3.2090533670185023e-07, + "loss": 0.528, + "step": 19248 + }, + { + "epoch": 0.7942972683007345, + "grad_norm": 2.4551133813247588, + "learning_rate": 3.2078142374171903e-07, + "loss": 0.4936, + "step": 19249 + }, + { + "epoch": 0.7943385326400925, + "grad_norm": 1.9107535702842406, + "learning_rate": 3.206575318453817e-07, + "loss": 0.4878, + "step": 19250 + }, + { + "epoch": 0.7943797969794504, + "grad_norm": 2.305349882418306, + "learning_rate": 3.2053366101505116e-07, + "loss": 0.5714, + "step": 19251 + }, + { + "epoch": 0.7944210613188083, + "grad_norm": 3.0296944131797576, + "learning_rate": 3.204098112529403e-07, + "loss": 0.5523, + "step": 19252 + }, + { + "epoch": 0.7944623256581662, + "grad_norm": 3.034927786608537, + "learning_rate": 3.202859825612608e-07, + "loss": 0.5088, + "step": 19253 + }, + { + 
"epoch": 0.7945035899975241, + "grad_norm": 9.17494252236809, + "learning_rate": 3.2016217494222476e-07, + "loss": 0.5585, + "step": 19254 + }, + { + "epoch": 0.794544854336882, + "grad_norm": 2.992391523131595, + "learning_rate": 3.200383883980438e-07, + "loss": 0.5816, + "step": 19255 + }, + { + "epoch": 0.79458611867624, + "grad_norm": 2.8828142355874964, + "learning_rate": 3.199146229309294e-07, + "loss": 0.4873, + "step": 19256 + }, + { + "epoch": 0.7946273830155979, + "grad_norm": 7.444080870283831, + "learning_rate": 3.1979087854309147e-07, + "loss": 0.4926, + "step": 19257 + }, + { + "epoch": 0.7946686473549558, + "grad_norm": 3.410187764855899, + "learning_rate": 3.196671552367411e-07, + "loss": 0.4756, + "step": 19258 + }, + { + "epoch": 0.7947099116943138, + "grad_norm": 2.250411502866603, + "learning_rate": 3.195434530140884e-07, + "loss": 0.4948, + "step": 19259 + }, + { + "epoch": 0.7947511760336717, + "grad_norm": 3.2138849110513275, + "learning_rate": 3.1941977187734217e-07, + "loss": 0.5355, + "step": 19260 + }, + { + "epoch": 0.7947924403730297, + "grad_norm": 2.829252091188604, + "learning_rate": 3.192961118287127e-07, + "loss": 0.4808, + "step": 19261 + }, + { + "epoch": 0.7948337047123876, + "grad_norm": 13.449505611436258, + "learning_rate": 3.191724728704079e-07, + "loss": 0.4722, + "step": 19262 + }, + { + "epoch": 0.7948749690517455, + "grad_norm": 2.5873372590328163, + "learning_rate": 3.190488550046367e-07, + "loss": 0.5125, + "step": 19263 + }, + { + "epoch": 0.7949162333911034, + "grad_norm": 4.523329543028102, + "learning_rate": 3.189252582336072e-07, + "loss": 0.5346, + "step": 19264 + }, + { + "epoch": 0.7949574977304613, + "grad_norm": 4.896443145975635, + "learning_rate": 3.1880168255952755e-07, + "loss": 0.4817, + "step": 19265 + }, + { + "epoch": 0.7949987620698192, + "grad_norm": 4.084563104961389, + "learning_rate": 3.1867812798460457e-07, + "loss": 0.5213, + "step": 19266 + }, + { + "epoch": 0.7950400264091771, + "grad_norm": 3.1901925516951644, + "learning_rate": 3.185545945110452e-07, + "loss": 0.5554, + "step": 19267 + }, + { + "epoch": 0.7950812907485351, + "grad_norm": 4.066274679171999, + "learning_rate": 3.1843108214105685e-07, + "loss": 0.5722, + "step": 19268 + }, + { + "epoch": 0.7951225550878931, + "grad_norm": 4.13959392817138, + "learning_rate": 3.183075908768447e-07, + "loss": 0.496, + "step": 19269 + }, + { + "epoch": 0.795163819427251, + "grad_norm": 4.386875493851275, + "learning_rate": 3.181841207206152e-07, + "loss": 0.5441, + "step": 19270 + }, + { + "epoch": 0.7952050837666089, + "grad_norm": 13.797639463891233, + "learning_rate": 3.1806067167457363e-07, + "loss": 0.5016, + "step": 19271 + }, + { + "epoch": 0.7952463481059668, + "grad_norm": 4.056231909697125, + "learning_rate": 3.1793724374092577e-07, + "loss": 0.501, + "step": 19272 + }, + { + "epoch": 0.7952876124453248, + "grad_norm": 3.0245575145355255, + "learning_rate": 3.178138369218758e-07, + "loss": 0.5137, + "step": 19273 + }, + { + "epoch": 0.7953288767846827, + "grad_norm": 16.041188045804493, + "learning_rate": 3.1769045121962757e-07, + "loss": 0.5305, + "step": 19274 + }, + { + "epoch": 0.7953701411240406, + "grad_norm": 4.544321452938917, + "learning_rate": 3.175670866363856e-07, + "loss": 0.5853, + "step": 19275 + }, + { + "epoch": 0.7954114054633985, + "grad_norm": 1.8668275950505326, + "learning_rate": 3.174437431743536e-07, + "loss": 0.4511, + "step": 19276 + }, + { + "epoch": 0.7954526698027564, + "grad_norm": 2.759258647140775, + "learning_rate": 
3.1732042083573493e-07, + "loss": 0.5058, + "step": 19277 + }, + { + "epoch": 0.7954939341421143, + "grad_norm": 2.454830621860504, + "learning_rate": 3.171971196227317e-07, + "loss": 0.5211, + "step": 19278 + }, + { + "epoch": 0.7955351984814724, + "grad_norm": 4.233766658117402, + "learning_rate": 3.1707383953754693e-07, + "loss": 0.5095, + "step": 19279 + }, + { + "epoch": 0.7955764628208303, + "grad_norm": 2.837927284586647, + "learning_rate": 3.1695058058238256e-07, + "loss": 0.4973, + "step": 19280 + }, + { + "epoch": 0.7956177271601882, + "grad_norm": 2.627592093942082, + "learning_rate": 3.168273427594407e-07, + "loss": 0.6015, + "step": 19281 + }, + { + "epoch": 0.7956589914995461, + "grad_norm": 6.426073470984366, + "learning_rate": 3.16704126070922e-07, + "loss": 0.5472, + "step": 19282 + }, + { + "epoch": 0.795700255838904, + "grad_norm": 3.099796370568004, + "learning_rate": 3.1658093051902783e-07, + "loss": 0.4792, + "step": 19283 + }, + { + "epoch": 0.7957415201782619, + "grad_norm": 3.541558022546793, + "learning_rate": 3.164577561059586e-07, + "loss": 0.5107, + "step": 19284 + }, + { + "epoch": 0.7957827845176199, + "grad_norm": 3.6066653365044146, + "learning_rate": 3.163346028339152e-07, + "loss": 0.5256, + "step": 19285 + }, + { + "epoch": 0.7958240488569778, + "grad_norm": 5.176333379622384, + "learning_rate": 3.1621147070509657e-07, + "loss": 0.4808, + "step": 19286 + }, + { + "epoch": 0.7958653131963357, + "grad_norm": 3.8099659832141466, + "learning_rate": 3.1608835972170224e-07, + "loss": 0.5018, + "step": 19287 + }, + { + "epoch": 0.7959065775356936, + "grad_norm": 4.4441413760934045, + "learning_rate": 3.1596526988593154e-07, + "loss": 0.4903, + "step": 19288 + }, + { + "epoch": 0.7959478418750516, + "grad_norm": 4.6488945094681995, + "learning_rate": 3.158422011999834e-07, + "loss": 0.5225, + "step": 19289 + }, + { + "epoch": 0.7959891062144095, + "grad_norm": 2.7632474789890633, + "learning_rate": 3.157191536660556e-07, + "loss": 0.5076, + "step": 19290 + }, + { + "epoch": 0.7960303705537675, + "grad_norm": 7.060429321330449, + "learning_rate": 3.155961272863463e-07, + "loss": 0.4584, + "step": 19291 + }, + { + "epoch": 0.7960716348931254, + "grad_norm": 2.885983813103624, + "learning_rate": 3.15473122063053e-07, + "loss": 0.5131, + "step": 19292 + }, + { + "epoch": 0.7961128992324833, + "grad_norm": 3.1509030557692936, + "learning_rate": 3.153501379983735e-07, + "loss": 0.5475, + "step": 19293 + }, + { + "epoch": 0.7961541635718412, + "grad_norm": 4.172993385529011, + "learning_rate": 3.1522717509450343e-07, + "loss": 0.5749, + "step": 19294 + }, + { + "epoch": 0.7961954279111991, + "grad_norm": 3.3720920447442655, + "learning_rate": 3.151042333536401e-07, + "loss": 0.4713, + "step": 19295 + }, + { + "epoch": 0.796236692250557, + "grad_norm": 5.327411752473559, + "learning_rate": 3.149813127779793e-07, + "loss": 0.5328, + "step": 19296 + }, + { + "epoch": 0.796277956589915, + "grad_norm": 15.386821330944407, + "learning_rate": 3.1485841336971703e-07, + "loss": 0.5409, + "step": 19297 + }, + { + "epoch": 0.7963192209292729, + "grad_norm": 3.485961278952394, + "learning_rate": 3.147355351310482e-07, + "loss": 0.5315, + "step": 19298 + }, + { + "epoch": 0.7963604852686309, + "grad_norm": 2.768105938358231, + "learning_rate": 3.1461267806416747e-07, + "loss": 0.4926, + "step": 19299 + }, + { + "epoch": 0.7964017496079888, + "grad_norm": 3.062407497697536, + "learning_rate": 3.144898421712697e-07, + "loss": 0.5428, + "step": 19300 + }, + { + "epoch": 
0.7964430139473467, + "grad_norm": 2.674838017667019, + "learning_rate": 3.1436702745454905e-07, + "loss": 0.5299, + "step": 19301 + }, + { + "epoch": 0.7964842782867047, + "grad_norm": 8.63356230563056, + "learning_rate": 3.142442339161998e-07, + "loss": 0.5199, + "step": 19302 + }, + { + "epoch": 0.7965255426260626, + "grad_norm": 4.195983129353094, + "learning_rate": 3.141214615584143e-07, + "loss": 0.512, + "step": 19303 + }, + { + "epoch": 0.7965668069654205, + "grad_norm": 2.797511738850757, + "learning_rate": 3.139987103833861e-07, + "loss": 0.5734, + "step": 19304 + }, + { + "epoch": 0.7966080713047784, + "grad_norm": 6.487403963837709, + "learning_rate": 3.13875980393308e-07, + "loss": 0.523, + "step": 19305 + }, + { + "epoch": 0.7966493356441363, + "grad_norm": 2.5866408302423363, + "learning_rate": 3.1375327159037235e-07, + "loss": 0.5029, + "step": 19306 + }, + { + "epoch": 0.7966905999834942, + "grad_norm": 7.907572146193589, + "learning_rate": 3.136305839767704e-07, + "loss": 0.5185, + "step": 19307 + }, + { + "epoch": 0.7967318643228521, + "grad_norm": 9.813317359820674, + "learning_rate": 3.1350791755469416e-07, + "loss": 0.557, + "step": 19308 + }, + { + "epoch": 0.7967731286622102, + "grad_norm": 6.97373062087669, + "learning_rate": 3.1338527232633504e-07, + "loss": 0.5085, + "step": 19309 + }, + { + "epoch": 0.7968143930015681, + "grad_norm": 2.023204667644631, + "learning_rate": 3.132626482938829e-07, + "loss": 0.457, + "step": 19310 + }, + { + "epoch": 0.796855657340926, + "grad_norm": 13.350439269224475, + "learning_rate": 3.1314004545952926e-07, + "loss": 0.5466, + "step": 19311 + }, + { + "epoch": 0.7968969216802839, + "grad_norm": 2.3414594988767687, + "learning_rate": 3.130174638254629e-07, + "loss": 0.5254, + "step": 19312 + }, + { + "epoch": 0.7969381860196418, + "grad_norm": 3.8482569729389984, + "learning_rate": 3.1289490339387407e-07, + "loss": 0.5433, + "step": 19313 + }, + { + "epoch": 0.7969794503589998, + "grad_norm": 4.622181925054288, + "learning_rate": 3.127723641669522e-07, + "loss": 0.4366, + "step": 19314 + }, + { + "epoch": 0.7970207146983577, + "grad_norm": 5.260152863572523, + "learning_rate": 3.1264984614688563e-07, + "loss": 0.5331, + "step": 19315 + }, + { + "epoch": 0.7970619790377156, + "grad_norm": 3.333567691587894, + "learning_rate": 3.125273493358631e-07, + "loss": 0.5746, + "step": 19316 + }, + { + "epoch": 0.7971032433770735, + "grad_norm": 4.244367950411403, + "learning_rate": 3.124048737360728e-07, + "loss": 0.48, + "step": 19317 + }, + { + "epoch": 0.7971445077164314, + "grad_norm": 15.842700118567262, + "learning_rate": 3.122824193497027e-07, + "loss": 0.4882, + "step": 19318 + }, + { + "epoch": 0.7971857720557893, + "grad_norm": 20.72768083614501, + "learning_rate": 3.121599861789394e-07, + "loss": 0.5221, + "step": 19319 + }, + { + "epoch": 0.7972270363951474, + "grad_norm": 3.00399704723684, + "learning_rate": 3.120375742259701e-07, + "loss": 0.5212, + "step": 19320 + }, + { + "epoch": 0.7972683007345053, + "grad_norm": 3.1359055754745677, + "learning_rate": 3.119151834929817e-07, + "loss": 0.5173, + "step": 19321 + }, + { + "epoch": 0.7973095650738632, + "grad_norm": 3.261027875443761, + "learning_rate": 3.117928139821607e-07, + "loss": 0.5461, + "step": 19322 + }, + { + "epoch": 0.7973508294132211, + "grad_norm": 10.947003221517459, + "learning_rate": 3.1167046569569196e-07, + "loss": 0.4926, + "step": 19323 + }, + { + "epoch": 0.797392093752579, + "grad_norm": 2.251621338994237, + "learning_rate": 3.1154813863576194e-07, + 
"loss": 0.548, + "step": 19324 + }, + { + "epoch": 0.7974333580919369, + "grad_norm": 2.1500847901410625, + "learning_rate": 3.114258328045548e-07, + "loss": 0.474, + "step": 19325 + }, + { + "epoch": 0.7974746224312949, + "grad_norm": 2.167159882078822, + "learning_rate": 3.113035482042561e-07, + "loss": 0.4751, + "step": 19326 + }, + { + "epoch": 0.7975158867706528, + "grad_norm": 2.1797877712203833, + "learning_rate": 3.1118128483704914e-07, + "loss": 0.4388, + "step": 19327 + }, + { + "epoch": 0.7975571511100107, + "grad_norm": 8.084176672801258, + "learning_rate": 3.110590427051186e-07, + "loss": 0.4907, + "step": 19328 + }, + { + "epoch": 0.7975984154493686, + "grad_norm": 3.080154695017708, + "learning_rate": 3.1093682181064765e-07, + "loss": 0.5265, + "step": 19329 + }, + { + "epoch": 0.7976396797887266, + "grad_norm": 1.8489454844832953, + "learning_rate": 3.108146221558201e-07, + "loss": 0.535, + "step": 19330 + }, + { + "epoch": 0.7976809441280845, + "grad_norm": 3.9550899460647138, + "learning_rate": 3.10692443742818e-07, + "loss": 0.5344, + "step": 19331 + }, + { + "epoch": 0.7977222084674425, + "grad_norm": 3.465877059621112, + "learning_rate": 3.1057028657382394e-07, + "loss": 0.4979, + "step": 19332 + }, + { + "epoch": 0.7977634728068004, + "grad_norm": 2.2735121517826142, + "learning_rate": 3.1044815065102e-07, + "loss": 0.5308, + "step": 19333 + }, + { + "epoch": 0.7978047371461583, + "grad_norm": 3.2727349467269393, + "learning_rate": 3.103260359765883e-07, + "loss": 0.4936, + "step": 19334 + }, + { + "epoch": 0.7978460014855162, + "grad_norm": 11.962740139276557, + "learning_rate": 3.1020394255270945e-07, + "loss": 0.5412, + "step": 19335 + }, + { + "epoch": 0.7978872658248741, + "grad_norm": 2.498954583676251, + "learning_rate": 3.100818703815645e-07, + "loss": 0.5322, + "step": 19336 + }, + { + "epoch": 0.797928530164232, + "grad_norm": 4.079923932602409, + "learning_rate": 3.0995981946533437e-07, + "loss": 0.4949, + "step": 19337 + }, + { + "epoch": 0.79796979450359, + "grad_norm": 3.87129811226576, + "learning_rate": 3.0983778980619865e-07, + "loss": 0.5219, + "step": 19338 + }, + { + "epoch": 0.7980110588429479, + "grad_norm": 3.801119167004995, + "learning_rate": 3.097157814063378e-07, + "loss": 0.5531, + "step": 19339 + }, + { + "epoch": 0.7980523231823059, + "grad_norm": 4.639351006495062, + "learning_rate": 3.095937942679302e-07, + "loss": 0.5018, + "step": 19340 + }, + { + "epoch": 0.7980935875216638, + "grad_norm": 3.1591804703611612, + "learning_rate": 3.0947182839315526e-07, + "loss": 0.5273, + "step": 19341 + }, + { + "epoch": 0.7981348518610217, + "grad_norm": 5.6779592155105405, + "learning_rate": 3.093498837841918e-07, + "loss": 0.5242, + "step": 19342 + }, + { + "epoch": 0.7981761162003796, + "grad_norm": 20.020222664232524, + "learning_rate": 3.092279604432185e-07, + "loss": 0.498, + "step": 19343 + }, + { + "epoch": 0.7982173805397376, + "grad_norm": 2.775522122904205, + "learning_rate": 3.0910605837241216e-07, + "loss": 0.5005, + "step": 19344 + }, + { + "epoch": 0.7982586448790955, + "grad_norm": 3.057662202099912, + "learning_rate": 3.0898417757395076e-07, + "loss": 0.5182, + "step": 19345 + }, + { + "epoch": 0.7982999092184534, + "grad_norm": 2.4332480421462686, + "learning_rate": 3.088623180500118e-07, + "loss": 0.5227, + "step": 19346 + }, + { + "epoch": 0.7983411735578113, + "grad_norm": 1.9420608088393976, + "learning_rate": 3.087404798027713e-07, + "loss": 0.4526, + "step": 19347 + }, + { + "epoch": 0.7983824378971692, + "grad_norm": 
2.748033701865391, + "learning_rate": 3.086186628344059e-07, + "loss": 0.5244, + "step": 19348 + }, + { + "epoch": 0.7984237022365271, + "grad_norm": 2.269854462510728, + "learning_rate": 3.0849686714709157e-07, + "loss": 0.5304, + "step": 19349 + }, + { + "epoch": 0.7984649665758852, + "grad_norm": 8.240055879120552, + "learning_rate": 3.083750927430041e-07, + "loss": 0.458, + "step": 19350 + }, + { + "epoch": 0.7985062309152431, + "grad_norm": 2.6596422642137885, + "learning_rate": 3.082533396243186e-07, + "loss": 0.5264, + "step": 19351 + }, + { + "epoch": 0.798547495254601, + "grad_norm": 2.3378437189147374, + "learning_rate": 3.0813160779320946e-07, + "loss": 0.5422, + "step": 19352 + }, + { + "epoch": 0.7985887595939589, + "grad_norm": 3.1593772506909303, + "learning_rate": 3.080098972518513e-07, + "loss": 0.4677, + "step": 19353 + }, + { + "epoch": 0.7986300239333168, + "grad_norm": 3.464929682816145, + "learning_rate": 3.0788820800241817e-07, + "loss": 0.589, + "step": 19354 + }, + { + "epoch": 0.7986712882726748, + "grad_norm": 1.9431778707670948, + "learning_rate": 3.077665400470844e-07, + "loss": 0.4929, + "step": 19355 + }, + { + "epoch": 0.7987125526120327, + "grad_norm": 3.2601195158487393, + "learning_rate": 3.0764489338802233e-07, + "loss": 0.4807, + "step": 19356 + }, + { + "epoch": 0.7987538169513906, + "grad_norm": 3.953358706407135, + "learning_rate": 3.0752326802740526e-07, + "loss": 0.5015, + "step": 19357 + }, + { + "epoch": 0.7987950812907485, + "grad_norm": 6.285615029765861, + "learning_rate": 3.0740166396740566e-07, + "loss": 0.526, + "step": 19358 + }, + { + "epoch": 0.7988363456301064, + "grad_norm": 4.715687381606751, + "learning_rate": 3.072800812101962e-07, + "loss": 0.4837, + "step": 19359 + }, + { + "epoch": 0.7988776099694644, + "grad_norm": 2.2271375446400583, + "learning_rate": 3.0715851975794777e-07, + "loss": 0.4932, + "step": 19360 + }, + { + "epoch": 0.7989188743088224, + "grad_norm": 2.328539393714841, + "learning_rate": 3.070369796128324e-07, + "loss": 0.5312, + "step": 19361 + }, + { + "epoch": 0.7989601386481803, + "grad_norm": 3.6121016650396944, + "learning_rate": 3.069154607770208e-07, + "loss": 0.5003, + "step": 19362 + }, + { + "epoch": 0.7990014029875382, + "grad_norm": 4.213109366935557, + "learning_rate": 3.06793963252684e-07, + "loss": 0.5044, + "step": 19363 + }, + { + "epoch": 0.7990426673268961, + "grad_norm": 3.9164256642680564, + "learning_rate": 3.066724870419922e-07, + "loss": 0.5453, + "step": 19364 + }, + { + "epoch": 0.799083931666254, + "grad_norm": 4.059733666637834, + "learning_rate": 3.065510321471143e-07, + "loss": 0.5131, + "step": 19365 + }, + { + "epoch": 0.7991251960056119, + "grad_norm": 2.4347926663574744, + "learning_rate": 3.0642959857022083e-07, + "loss": 0.5158, + "step": 19366 + }, + { + "epoch": 0.7991664603449699, + "grad_norm": 2.822989436193465, + "learning_rate": 3.0630818631348084e-07, + "loss": 0.4758, + "step": 19367 + }, + { + "epoch": 0.7992077246843278, + "grad_norm": 21.17935556504871, + "learning_rate": 3.061867953790624e-07, + "loss": 0.4863, + "step": 19368 + }, + { + "epoch": 0.7992489890236857, + "grad_norm": 7.239222593179242, + "learning_rate": 3.060654257691343e-07, + "loss": 0.4698, + "step": 19369 + }, + { + "epoch": 0.7992902533630437, + "grad_norm": 2.299718790310467, + "learning_rate": 3.059440774858645e-07, + "loss": 0.4922, + "step": 19370 + }, + { + "epoch": 0.7993315177024016, + "grad_norm": 3.3353741273988144, + "learning_rate": 3.058227505314209e-07, + "loss": 0.5067, + "step": 
19371 + }, + { + "epoch": 0.7993727820417595, + "grad_norm": 3.0279883741199467, + "learning_rate": 3.057014449079699e-07, + "loss": 0.5456, + "step": 19372 + }, + { + "epoch": 0.7994140463811175, + "grad_norm": 3.2372052310366146, + "learning_rate": 3.0558016061767883e-07, + "loss": 0.5017, + "step": 19373 + }, + { + "epoch": 0.7994553107204754, + "grad_norm": 8.898620692958378, + "learning_rate": 3.0545889766271414e-07, + "loss": 0.4899, + "step": 19374 + }, + { + "epoch": 0.7994965750598333, + "grad_norm": 2.0752975552752733, + "learning_rate": 3.05337656045242e-07, + "loss": 0.4666, + "step": 19375 + }, + { + "epoch": 0.7995378393991912, + "grad_norm": 7.8031965623791155, + "learning_rate": 3.0521643576742775e-07, + "loss": 0.509, + "step": 19376 + }, + { + "epoch": 0.7995791037385491, + "grad_norm": 2.257102338818722, + "learning_rate": 3.0509523683143714e-07, + "loss": 0.5112, + "step": 19377 + }, + { + "epoch": 0.799620368077907, + "grad_norm": 2.6298865262045994, + "learning_rate": 3.0497405923943426e-07, + "loss": 0.5038, + "step": 19378 + }, + { + "epoch": 0.799661632417265, + "grad_norm": 2.506727591986958, + "learning_rate": 3.0485290299358444e-07, + "loss": 0.5393, + "step": 19379 + }, + { + "epoch": 0.7997028967566229, + "grad_norm": 6.122603169866892, + "learning_rate": 3.0473176809605185e-07, + "loss": 0.5103, + "step": 19380 + }, + { + "epoch": 0.7997441610959809, + "grad_norm": 2.566557615007716, + "learning_rate": 3.046106545489995e-07, + "loss": 0.5224, + "step": 19381 + }, + { + "epoch": 0.7997854254353388, + "grad_norm": 3.2656209989650904, + "learning_rate": 3.044895623545915e-07, + "loss": 0.4705, + "step": 19382 + }, + { + "epoch": 0.7998266897746967, + "grad_norm": 7.611872806664445, + "learning_rate": 3.0436849151499036e-07, + "loss": 0.4908, + "step": 19383 + }, + { + "epoch": 0.7998679541140546, + "grad_norm": 2.4901251135882614, + "learning_rate": 3.042474420323596e-07, + "loss": 0.4443, + "step": 19384 + }, + { + "epoch": 0.7999092184534126, + "grad_norm": 8.345251843663556, + "learning_rate": 3.041264139088603e-07, + "loss": 0.5309, + "step": 19385 + }, + { + "epoch": 0.7999504827927705, + "grad_norm": 2.9375144501958763, + "learning_rate": 3.040054071466549e-07, + "loss": 0.5666, + "step": 19386 + }, + { + "epoch": 0.7999917471321284, + "grad_norm": 5.9705223183274265, + "learning_rate": 3.038844217479052e-07, + "loss": 0.5127, + "step": 19387 + }, + { + "epoch": 0.8000330114714863, + "grad_norm": 3.2223846070935305, + "learning_rate": 3.037634577147716e-07, + "loss": 0.5262, + "step": 19388 + }, + { + "epoch": 0.8000742758108442, + "grad_norm": 2.802761320702831, + "learning_rate": 3.036425150494152e-07, + "loss": 0.4808, + "step": 19389 + }, + { + "epoch": 0.8001155401502021, + "grad_norm": 3.108448747674346, + "learning_rate": 3.035215937539966e-07, + "loss": 0.5106, + "step": 19390 + }, + { + "epoch": 0.8001568044895602, + "grad_norm": 4.0103544453776125, + "learning_rate": 3.0340069383067516e-07, + "loss": 0.5477, + "step": 19391 + }, + { + "epoch": 0.8001980688289181, + "grad_norm": 10.08004756475456, + "learning_rate": 3.032798152816112e-07, + "loss": 0.4784, + "step": 19392 + }, + { + "epoch": 0.800239333168276, + "grad_norm": 9.485244163095143, + "learning_rate": 3.0315895810896306e-07, + "loss": 0.4773, + "step": 19393 + }, + { + "epoch": 0.8002805975076339, + "grad_norm": 2.5266580581523104, + "learning_rate": 3.030381223148898e-07, + "loss": 0.531, + "step": 19394 + }, + { + "epoch": 0.8003218618469918, + "grad_norm": 2.9498478675837116, + 
"learning_rate": 3.0291730790155025e-07, + "loss": 0.5511, + "step": 19395 + }, + { + "epoch": 0.8003631261863497, + "grad_norm": 4.967023931470311, + "learning_rate": 3.027965148711025e-07, + "loss": 0.4778, + "step": 19396 + }, + { + "epoch": 0.8004043905257077, + "grad_norm": 2.9591198049188145, + "learning_rate": 3.0267574322570344e-07, + "loss": 0.5226, + "step": 19397 + }, + { + "epoch": 0.8004456548650656, + "grad_norm": 3.544102601011885, + "learning_rate": 3.025549929675109e-07, + "loss": 0.4977, + "step": 19398 + }, + { + "epoch": 0.8004869192044235, + "grad_norm": 4.9987842403620855, + "learning_rate": 3.024342640986819e-07, + "loss": 0.4495, + "step": 19399 + }, + { + "epoch": 0.8005281835437814, + "grad_norm": 4.111970159737533, + "learning_rate": 3.0231355662137315e-07, + "loss": 0.5014, + "step": 19400 + }, + { + "epoch": 0.8005694478831394, + "grad_norm": 3.102216598646541, + "learning_rate": 3.0219287053774e-07, + "loss": 0.565, + "step": 19401 + }, + { + "epoch": 0.8006107122224974, + "grad_norm": 4.029249814420124, + "learning_rate": 3.0207220584993854e-07, + "loss": 0.5529, + "step": 19402 + }, + { + "epoch": 0.8006519765618553, + "grad_norm": 3.0278294372167243, + "learning_rate": 3.019515625601247e-07, + "loss": 0.5006, + "step": 19403 + }, + { + "epoch": 0.8006932409012132, + "grad_norm": 10.87469655128272, + "learning_rate": 3.0183094067045296e-07, + "loss": 0.5323, + "step": 19404 + }, + { + "epoch": 0.8007345052405711, + "grad_norm": 4.575607835062813, + "learning_rate": 3.017103401830777e-07, + "loss": 0.5224, + "step": 19405 + }, + { + "epoch": 0.800775769579929, + "grad_norm": 3.106054782209127, + "learning_rate": 3.015897611001533e-07, + "loss": 0.5429, + "step": 19406 + }, + { + "epoch": 0.8008170339192869, + "grad_norm": 2.43441784736442, + "learning_rate": 3.014692034238339e-07, + "loss": 0.47, + "step": 19407 + }, + { + "epoch": 0.8008582982586449, + "grad_norm": 15.300576505695403, + "learning_rate": 3.013486671562731e-07, + "loss": 0.5613, + "step": 19408 + }, + { + "epoch": 0.8008995625980028, + "grad_norm": 2.845232049401232, + "learning_rate": 3.0122815229962316e-07, + "loss": 0.5101, + "step": 19409 + }, + { + "epoch": 0.8009408269373607, + "grad_norm": 4.309557555216296, + "learning_rate": 3.011076588560374e-07, + "loss": 0.4833, + "step": 19410 + }, + { + "epoch": 0.8009820912767187, + "grad_norm": 2.0137460454470735, + "learning_rate": 3.0098718682766803e-07, + "loss": 0.4578, + "step": 19411 + }, + { + "epoch": 0.8010233556160766, + "grad_norm": 5.037575662099958, + "learning_rate": 3.0086673621666736e-07, + "loss": 0.4976, + "step": 19412 + }, + { + "epoch": 0.8010646199554345, + "grad_norm": 6.386472079387288, + "learning_rate": 3.007463070251861e-07, + "loss": 0.5106, + "step": 19413 + }, + { + "epoch": 0.8011058842947925, + "grad_norm": 2.3929248716734115, + "learning_rate": 3.0062589925537605e-07, + "loss": 0.4617, + "step": 19414 + }, + { + "epoch": 0.8011471486341504, + "grad_norm": 8.389877604358947, + "learning_rate": 3.0050551290938755e-07, + "loss": 0.5266, + "step": 19415 + }, + { + "epoch": 0.8011884129735083, + "grad_norm": 3.669044070976827, + "learning_rate": 3.003851479893719e-07, + "loss": 0.4962, + "step": 19416 + }, + { + "epoch": 0.8012296773128662, + "grad_norm": 3.114377579653613, + "learning_rate": 3.0026480449747836e-07, + "loss": 0.4814, + "step": 19417 + }, + { + "epoch": 0.8012709416522241, + "grad_norm": 2.5885471462187515, + "learning_rate": 3.001444824358562e-07, + "loss": 0.5529, + "step": 19418 + }, + { + "epoch": 
0.801312205991582, + "grad_norm": 9.247972259120061, + "learning_rate": 3.0002418180665535e-07, + "loss": 0.5283, + "step": 19419 + }, + { + "epoch": 0.80135347033094, + "grad_norm": 2.289914453245503, + "learning_rate": 2.9990390261202434e-07, + "loss": 0.5394, + "step": 19420 + }, + { + "epoch": 0.801394734670298, + "grad_norm": 2.202861070015344, + "learning_rate": 2.997836448541124e-07, + "loss": 0.5746, + "step": 19421 + }, + { + "epoch": 0.8014359990096559, + "grad_norm": 2.713467232725692, + "learning_rate": 2.996634085350664e-07, + "loss": 0.4972, + "step": 19422 + }, + { + "epoch": 0.8014772633490138, + "grad_norm": 2.378730713586403, + "learning_rate": 2.995431936570349e-07, + "loss": 0.5239, + "step": 19423 + }, + { + "epoch": 0.8015185276883717, + "grad_norm": 2.03861746938266, + "learning_rate": 2.9942300022216535e-07, + "loss": 0.5099, + "step": 19424 + }, + { + "epoch": 0.8015597920277296, + "grad_norm": 2.0006332607767403, + "learning_rate": 2.9930282823260396e-07, + "loss": 0.5053, + "step": 19425 + }, + { + "epoch": 0.8016010563670876, + "grad_norm": 4.709961837577086, + "learning_rate": 2.991826776904979e-07, + "loss": 0.5148, + "step": 19426 + }, + { + "epoch": 0.8016423207064455, + "grad_norm": 4.708520244181323, + "learning_rate": 2.990625485979931e-07, + "loss": 0.5432, + "step": 19427 + }, + { + "epoch": 0.8016835850458034, + "grad_norm": 2.129247552389291, + "learning_rate": 2.989424409572358e-07, + "loss": 0.4872, + "step": 19428 + }, + { + "epoch": 0.8017248493851613, + "grad_norm": 5.690025482764818, + "learning_rate": 2.988223547703712e-07, + "loss": 0.5329, + "step": 19429 + }, + { + "epoch": 0.8017661137245192, + "grad_norm": 3.9370584768340597, + "learning_rate": 2.987022900395437e-07, + "loss": 0.5107, + "step": 19430 + }, + { + "epoch": 0.8018073780638773, + "grad_norm": 2.24310169481566, + "learning_rate": 2.9858224676689867e-07, + "loss": 0.5331, + "step": 19431 + }, + { + "epoch": 0.8018486424032352, + "grad_norm": 4.560574672583874, + "learning_rate": 2.984622249545801e-07, + "loss": 0.48, + "step": 19432 + }, + { + "epoch": 0.8018899067425931, + "grad_norm": 3.1105188305760527, + "learning_rate": 2.983422246047325e-07, + "loss": 0.4905, + "step": 19433 + }, + { + "epoch": 0.801931171081951, + "grad_norm": 10.643798742296763, + "learning_rate": 2.982222457194983e-07, + "loss": 0.5234, + "step": 19434 + }, + { + "epoch": 0.8019724354213089, + "grad_norm": 3.060988651278205, + "learning_rate": 2.9810228830102115e-07, + "loss": 0.5316, + "step": 19435 + }, + { + "epoch": 0.8020136997606668, + "grad_norm": 2.6243174855148137, + "learning_rate": 2.97982352351444e-07, + "loss": 0.471, + "step": 19436 + }, + { + "epoch": 0.8020549641000247, + "grad_norm": 2.332431952940226, + "learning_rate": 2.9786243787290927e-07, + "loss": 0.4712, + "step": 19437 + }, + { + "epoch": 0.8020962284393827, + "grad_norm": 2.928880123098362, + "learning_rate": 2.977425448675583e-07, + "loss": 0.5068, + "step": 19438 + }, + { + "epoch": 0.8021374927787406, + "grad_norm": 2.5960653225061843, + "learning_rate": 2.976226733375332e-07, + "loss": 0.4851, + "step": 19439 + }, + { + "epoch": 0.8021787571180985, + "grad_norm": 2.0120459324407247, + "learning_rate": 2.9750282328497485e-07, + "loss": 0.4847, + "step": 19440 + }, + { + "epoch": 0.8022200214574564, + "grad_norm": 2.6738961900352227, + "learning_rate": 2.973829947120248e-07, + "loss": 0.5037, + "step": 19441 + }, + { + "epoch": 0.8022612857968144, + "grad_norm": 3.0265637512947823, + "learning_rate": 2.9726318762082245e-07, + 
"loss": 0.4548, + "step": 19442 + }, + { + "epoch": 0.8023025501361724, + "grad_norm": 4.256400008876265, + "learning_rate": 2.9714340201350894e-07, + "loss": 0.5248, + "step": 19443 + }, + { + "epoch": 0.8023438144755303, + "grad_norm": 7.455573908127549, + "learning_rate": 2.970236378922227e-07, + "loss": 0.4784, + "step": 19444 + }, + { + "epoch": 0.8023850788148882, + "grad_norm": 5.684701246219696, + "learning_rate": 2.969038952591042e-07, + "loss": 0.5287, + "step": 19445 + }, + { + "epoch": 0.8024263431542461, + "grad_norm": 6.248906403691194, + "learning_rate": 2.967841741162914e-07, + "loss": 0.4562, + "step": 19446 + }, + { + "epoch": 0.802467607493604, + "grad_norm": 6.77782637896264, + "learning_rate": 2.966644744659233e-07, + "loss": 0.5079, + "step": 19447 + }, + { + "epoch": 0.8025088718329619, + "grad_norm": 4.3415341990045455, + "learning_rate": 2.96544796310138e-07, + "loss": 0.5539, + "step": 19448 + }, + { + "epoch": 0.8025501361723199, + "grad_norm": 2.317666765553928, + "learning_rate": 2.964251396510735e-07, + "loss": 0.5311, + "step": 19449 + }, + { + "epoch": 0.8025914005116778, + "grad_norm": 2.661565941322279, + "learning_rate": 2.9630550449086664e-07, + "loss": 0.4771, + "step": 19450 + }, + { + "epoch": 0.8026326648510357, + "grad_norm": 3.6108508148379124, + "learning_rate": 2.9618589083165456e-07, + "loss": 0.5069, + "step": 19451 + }, + { + "epoch": 0.8026739291903937, + "grad_norm": 2.806863006713442, + "learning_rate": 2.960662986755739e-07, + "loss": 0.5015, + "step": 19452 + }, + { + "epoch": 0.8027151935297516, + "grad_norm": 2.2128437266977414, + "learning_rate": 2.959467280247613e-07, + "loss": 0.4874, + "step": 19453 + }, + { + "epoch": 0.8027564578691095, + "grad_norm": 1.9924422598385483, + "learning_rate": 2.958271788813519e-07, + "loss": 0.5093, + "step": 19454 + }, + { + "epoch": 0.8027977222084675, + "grad_norm": 4.992521992803643, + "learning_rate": 2.9570765124748153e-07, + "loss": 0.4829, + "step": 19455 + }, + { + "epoch": 0.8028389865478254, + "grad_norm": 3.5829421309108738, + "learning_rate": 2.955881451252854e-07, + "loss": 0.5368, + "step": 19456 + }, + { + "epoch": 0.8028802508871833, + "grad_norm": 6.897759482401628, + "learning_rate": 2.954686605168976e-07, + "loss": 0.5669, + "step": 19457 + }, + { + "epoch": 0.8029215152265412, + "grad_norm": 5.582000802183365, + "learning_rate": 2.9534919742445346e-07, + "loss": 0.5063, + "step": 19458 + }, + { + "epoch": 0.8029627795658991, + "grad_norm": 3.0527952313395077, + "learning_rate": 2.9522975585008554e-07, + "loss": 0.5534, + "step": 19459 + }, + { + "epoch": 0.803004043905257, + "grad_norm": 2.302130654376374, + "learning_rate": 2.951103357959281e-07, + "loss": 0.5137, + "step": 19460 + }, + { + "epoch": 0.803045308244615, + "grad_norm": 3.9262097536188847, + "learning_rate": 2.9499093726411444e-07, + "loss": 0.5351, + "step": 19461 + }, + { + "epoch": 0.803086572583973, + "grad_norm": 17.299209071961233, + "learning_rate": 2.9487156025677723e-07, + "loss": 0.487, + "step": 19462 + }, + { + "epoch": 0.8031278369233309, + "grad_norm": 2.208868898941366, + "learning_rate": 2.947522047760486e-07, + "loss": 0.5201, + "step": 19463 + }, + { + "epoch": 0.8031691012626888, + "grad_norm": 2.808512240543536, + "learning_rate": 2.9463287082406053e-07, + "loss": 0.4528, + "step": 19464 + }, + { + "epoch": 0.8032103656020467, + "grad_norm": 5.488119255387606, + "learning_rate": 2.9451355840294506e-07, + "loss": 0.4589, + "step": 19465 + }, + { + "epoch": 0.8032516299414046, + "grad_norm": 
4.658926350262688, + "learning_rate": 2.943942675148328e-07, + "loss": 0.5449, + "step": 19466 + }, + { + "epoch": 0.8032928942807626, + "grad_norm": 2.3141654167219508, + "learning_rate": 2.9427499816185495e-07, + "loss": 0.5149, + "step": 19467 + }, + { + "epoch": 0.8033341586201205, + "grad_norm": 2.708131562361789, + "learning_rate": 2.9415575034614184e-07, + "loss": 0.5178, + "step": 19468 + }, + { + "epoch": 0.8033754229594784, + "grad_norm": 2.1219789031776264, + "learning_rate": 2.9403652406982383e-07, + "loss": 0.4813, + "step": 19469 + }, + { + "epoch": 0.8034166872988363, + "grad_norm": 3.2546427299092824, + "learning_rate": 2.939173193350306e-07, + "loss": 0.5207, + "step": 19470 + }, + { + "epoch": 0.8034579516381942, + "grad_norm": 5.882422592693179, + "learning_rate": 2.937981361438907e-07, + "loss": 0.4995, + "step": 19471 + }, + { + "epoch": 0.8034992159775523, + "grad_norm": 7.71767176825631, + "learning_rate": 2.936789744985334e-07, + "loss": 0.568, + "step": 19472 + }, + { + "epoch": 0.8035404803169102, + "grad_norm": 2.749998633440269, + "learning_rate": 2.9355983440108765e-07, + "loss": 0.4997, + "step": 19473 + }, + { + "epoch": 0.8035817446562681, + "grad_norm": 3.4299156076506394, + "learning_rate": 2.934407158536816e-07, + "loss": 0.528, + "step": 19474 + }, + { + "epoch": 0.803623008995626, + "grad_norm": 2.8686658330573684, + "learning_rate": 2.933216188584423e-07, + "loss": 0.5667, + "step": 19475 + }, + { + "epoch": 0.8036642733349839, + "grad_norm": 2.569428473685504, + "learning_rate": 2.932025434174975e-07, + "loss": 0.4817, + "step": 19476 + }, + { + "epoch": 0.8037055376743418, + "grad_norm": 138.30059268338505, + "learning_rate": 2.930834895329741e-07, + "loss": 0.4786, + "step": 19477 + }, + { + "epoch": 0.8037468020136997, + "grad_norm": 7.008458775591874, + "learning_rate": 2.929644572069994e-07, + "loss": 0.4896, + "step": 19478 + }, + { + "epoch": 0.8037880663530577, + "grad_norm": 3.603436283991861, + "learning_rate": 2.9284544644169865e-07, + "loss": 0.4802, + "step": 19479 + }, + { + "epoch": 0.8038293306924156, + "grad_norm": 8.530678362049327, + "learning_rate": 2.9272645723919804e-07, + "loss": 0.4859, + "step": 19480 + }, + { + "epoch": 0.8038705950317735, + "grad_norm": 4.5904683814901945, + "learning_rate": 2.9260748960162296e-07, + "loss": 0.5355, + "step": 19481 + }, + { + "epoch": 0.8039118593711315, + "grad_norm": 2.658005543187455, + "learning_rate": 2.924885435310994e-07, + "loss": 0.4616, + "step": 19482 + }, + { + "epoch": 0.8039531237104894, + "grad_norm": 2.8218945903379833, + "learning_rate": 2.923696190297504e-07, + "loss": 0.4936, + "step": 19483 + }, + { + "epoch": 0.8039943880498474, + "grad_norm": 2.8613358104463424, + "learning_rate": 2.92250716099701e-07, + "loss": 0.5347, + "step": 19484 + }, + { + "epoch": 0.8040356523892053, + "grad_norm": 3.8773634266900996, + "learning_rate": 2.9213183474307516e-07, + "loss": 0.5445, + "step": 19485 + }, + { + "epoch": 0.8040769167285632, + "grad_norm": 2.443904082168595, + "learning_rate": 2.920129749619966e-07, + "loss": 0.5421, + "step": 19486 + }, + { + "epoch": 0.8041181810679211, + "grad_norm": 5.051868836323354, + "learning_rate": 2.918941367585881e-07, + "loss": 0.5213, + "step": 19487 + }, + { + "epoch": 0.804159445407279, + "grad_norm": 5.107922381015759, + "learning_rate": 2.9177532013497237e-07, + "loss": 0.5596, + "step": 19488 + }, + { + "epoch": 0.8042007097466369, + "grad_norm": 4.6402728322904645, + "learning_rate": 2.9165652509327204e-07, + "loss": 0.5052, + "step": 
19489 + }, + { + "epoch": 0.8042419740859948, + "grad_norm": 3.414297815400099, + "learning_rate": 2.915377516356093e-07, + "loss": 0.4972, + "step": 19490 + }, + { + "epoch": 0.8042832384253528, + "grad_norm": 2.4348258616202716, + "learning_rate": 2.9141899976410505e-07, + "loss": 0.5475, + "step": 19491 + }, + { + "epoch": 0.8043245027647108, + "grad_norm": 3.199017107976424, + "learning_rate": 2.9130026948088103e-07, + "loss": 0.5381, + "step": 19492 + }, + { + "epoch": 0.8043657671040687, + "grad_norm": 2.5206070160737575, + "learning_rate": 2.9118156078805773e-07, + "loss": 0.4682, + "step": 19493 + }, + { + "epoch": 0.8044070314434266, + "grad_norm": 3.092938213047338, + "learning_rate": 2.910628736877561e-07, + "loss": 0.4869, + "step": 19494 + }, + { + "epoch": 0.8044482957827845, + "grad_norm": 4.858150217942959, + "learning_rate": 2.90944208182096e-07, + "loss": 0.5471, + "step": 19495 + }, + { + "epoch": 0.8044895601221425, + "grad_norm": 2.1576906805905316, + "learning_rate": 2.9082556427319647e-07, + "loss": 0.4997, + "step": 19496 + }, + { + "epoch": 0.8045308244615004, + "grad_norm": 2.704831525540899, + "learning_rate": 2.907069419631771e-07, + "loss": 0.5027, + "step": 19497 + }, + { + "epoch": 0.8045720888008583, + "grad_norm": 2.504634856218627, + "learning_rate": 2.9058834125415724e-07, + "loss": 0.4983, + "step": 19498 + }, + { + "epoch": 0.8046133531402162, + "grad_norm": 3.0595797232157556, + "learning_rate": 2.9046976214825513e-07, + "loss": 0.4842, + "step": 19499 + }, + { + "epoch": 0.8046546174795741, + "grad_norm": 5.496058995860165, + "learning_rate": 2.903512046475887e-07, + "loss": 0.5344, + "step": 19500 + }, + { + "epoch": 0.804695881818932, + "grad_norm": 2.4561865582241893, + "learning_rate": 2.9023266875427576e-07, + "loss": 0.5474, + "step": 19501 + }, + { + "epoch": 0.80473714615829, + "grad_norm": 3.474555752322659, + "learning_rate": 2.901141544704343e-07, + "loss": 0.5262, + "step": 19502 + }, + { + "epoch": 0.804778410497648, + "grad_norm": 3.9852628178555114, + "learning_rate": 2.899956617981801e-07, + "loss": 0.4801, + "step": 19503 + }, + { + "epoch": 0.8048196748370059, + "grad_norm": 5.018985232459274, + "learning_rate": 2.898771907396304e-07, + "loss": 0.5272, + "step": 19504 + }, + { + "epoch": 0.8048609391763638, + "grad_norm": 3.1231744985686785, + "learning_rate": 2.8975874129690136e-07, + "loss": 0.5246, + "step": 19505 + }, + { + "epoch": 0.8049022035157217, + "grad_norm": 2.597303682284112, + "learning_rate": 2.8964031347210913e-07, + "loss": 0.4943, + "step": 19506 + }, + { + "epoch": 0.8049434678550796, + "grad_norm": 3.467121591093697, + "learning_rate": 2.8952190726736843e-07, + "loss": 0.5089, + "step": 19507 + }, + { + "epoch": 0.8049847321944376, + "grad_norm": 3.504726836013705, + "learning_rate": 2.894035226847948e-07, + "loss": 0.5427, + "step": 19508 + }, + { + "epoch": 0.8050259965337955, + "grad_norm": 2.4473660747282966, + "learning_rate": 2.892851597265025e-07, + "loss": 0.4642, + "step": 19509 + }, + { + "epoch": 0.8050672608731534, + "grad_norm": 3.5374366040477256, + "learning_rate": 2.8916681839460595e-07, + "loss": 0.5125, + "step": 19510 + }, + { + "epoch": 0.8051085252125113, + "grad_norm": 2.5422572956367797, + "learning_rate": 2.890484986912195e-07, + "loss": 0.5119, + "step": 19511 + }, + { + "epoch": 0.8051497895518692, + "grad_norm": 2.630385597542835, + "learning_rate": 2.889302006184559e-07, + "loss": 0.5355, + "step": 19512 + }, + { + "epoch": 0.8051910538912272, + "grad_norm": 4.356102812231552, + 
"learning_rate": 2.8881192417842833e-07, + "loss": 0.5491, + "step": 19513 + }, + { + "epoch": 0.8052323182305852, + "grad_norm": 2.8432636629610206, + "learning_rate": 2.886936693732499e-07, + "loss": 0.4726, + "step": 19514 + }, + { + "epoch": 0.8052735825699431, + "grad_norm": 3.6835128122388423, + "learning_rate": 2.88575436205033e-07, + "loss": 0.5529, + "step": 19515 + }, + { + "epoch": 0.805314846909301, + "grad_norm": 2.9451485906131425, + "learning_rate": 2.884572246758889e-07, + "loss": 0.5237, + "step": 19516 + }, + { + "epoch": 0.8053561112486589, + "grad_norm": 2.548784519716191, + "learning_rate": 2.8833903478792973e-07, + "loss": 0.5942, + "step": 19517 + }, + { + "epoch": 0.8053973755880168, + "grad_norm": 2.9298526139129013, + "learning_rate": 2.8822086654326654e-07, + "loss": 0.4736, + "step": 19518 + }, + { + "epoch": 0.8054386399273747, + "grad_norm": 1.772780434256717, + "learning_rate": 2.8810271994401045e-07, + "loss": 0.4749, + "step": 19519 + }, + { + "epoch": 0.8054799042667327, + "grad_norm": 4.320803287273768, + "learning_rate": 2.87984594992271e-07, + "loss": 0.4534, + "step": 19520 + }, + { + "epoch": 0.8055211686060906, + "grad_norm": 2.174517496579403, + "learning_rate": 2.878664916901591e-07, + "loss": 0.4905, + "step": 19521 + }, + { + "epoch": 0.8055624329454485, + "grad_norm": 4.620644104665754, + "learning_rate": 2.8774841003978347e-07, + "loss": 0.5168, + "step": 19522 + }, + { + "epoch": 0.8056036972848065, + "grad_norm": 3.4002164767351775, + "learning_rate": 2.8763035004325436e-07, + "loss": 0.488, + "step": 19523 + }, + { + "epoch": 0.8056449616241644, + "grad_norm": 3.1596402310118266, + "learning_rate": 2.8751231170267966e-07, + "loss": 0.4979, + "step": 19524 + }, + { + "epoch": 0.8056862259635224, + "grad_norm": 2.093771219997211, + "learning_rate": 2.873942950201683e-07, + "loss": 0.428, + "step": 19525 + }, + { + "epoch": 0.8057274903028803, + "grad_norm": 3.1782275207412565, + "learning_rate": 2.8727629999782825e-07, + "loss": 0.5287, + "step": 19526 + }, + { + "epoch": 0.8057687546422382, + "grad_norm": 4.238331533140713, + "learning_rate": 2.8715832663776764e-07, + "loss": 0.4808, + "step": 19527 + }, + { + "epoch": 0.8058100189815961, + "grad_norm": 3.2671035017522003, + "learning_rate": 2.87040374942093e-07, + "loss": 0.5145, + "step": 19528 + }, + { + "epoch": 0.805851283320954, + "grad_norm": 2.8220457343853673, + "learning_rate": 2.869224449129116e-07, + "loss": 0.5573, + "step": 19529 + }, + { + "epoch": 0.8058925476603119, + "grad_norm": 8.242358550216254, + "learning_rate": 2.8680453655233e-07, + "loss": 0.5493, + "step": 19530 + }, + { + "epoch": 0.8059338119996698, + "grad_norm": 3.3985836319071128, + "learning_rate": 2.8668664986245477e-07, + "loss": 0.5453, + "step": 19531 + }, + { + "epoch": 0.8059750763390278, + "grad_norm": 3.7449507501071513, + "learning_rate": 2.865687848453908e-07, + "loss": 0.5089, + "step": 19532 + }, + { + "epoch": 0.8060163406783858, + "grad_norm": 3.165917744232242, + "learning_rate": 2.864509415032438e-07, + "loss": 0.5202, + "step": 19533 + }, + { + "epoch": 0.8060576050177437, + "grad_norm": 3.375744085316013, + "learning_rate": 2.863331198381188e-07, + "loss": 0.5319, + "step": 19534 + }, + { + "epoch": 0.8060988693571016, + "grad_norm": 3.8447275368483194, + "learning_rate": 2.86215319852121e-07, + "loss": 0.5314, + "step": 19535 + }, + { + "epoch": 0.8061401336964595, + "grad_norm": 3.2335274076892344, + "learning_rate": 2.8609754154735383e-07, + "loss": 0.5426, + "step": 19536 + }, + { + 
"epoch": 0.8061813980358175, + "grad_norm": 2.9911905146336757, + "learning_rate": 2.8597978492592097e-07, + "loss": 0.5198, + "step": 19537 + }, + { + "epoch": 0.8062226623751754, + "grad_norm": 2.035366742638583, + "learning_rate": 2.858620499899262e-07, + "loss": 0.528, + "step": 19538 + }, + { + "epoch": 0.8062639267145333, + "grad_norm": 2.978988430304767, + "learning_rate": 2.857443367414724e-07, + "loss": 0.4657, + "step": 19539 + }, + { + "epoch": 0.8063051910538912, + "grad_norm": 7.3039035287568925, + "learning_rate": 2.856266451826628e-07, + "loss": 0.4894, + "step": 19540 + }, + { + "epoch": 0.8063464553932491, + "grad_norm": 4.98261447460519, + "learning_rate": 2.855089753155989e-07, + "loss": 0.5334, + "step": 19541 + }, + { + "epoch": 0.806387719732607, + "grad_norm": 2.3868484122247042, + "learning_rate": 2.853913271423828e-07, + "loss": 0.497, + "step": 19542 + }, + { + "epoch": 0.8064289840719651, + "grad_norm": 3.68728595316584, + "learning_rate": 2.8527370066511657e-07, + "loss": 0.4746, + "step": 19543 + }, + { + "epoch": 0.806470248411323, + "grad_norm": 3.54479487727992, + "learning_rate": 2.851560958859003e-07, + "loss": 0.484, + "step": 19544 + }, + { + "epoch": 0.8065115127506809, + "grad_norm": 3.4979216271311815, + "learning_rate": 2.850385128068352e-07, + "loss": 0.5248, + "step": 19545 + }, + { + "epoch": 0.8065527770900388, + "grad_norm": 2.8753786960108445, + "learning_rate": 2.8492095143002184e-07, + "loss": 0.5768, + "step": 19546 + }, + { + "epoch": 0.8065940414293967, + "grad_norm": 3.2629264416439074, + "learning_rate": 2.8480341175756027e-07, + "loss": 0.5207, + "step": 19547 + }, + { + "epoch": 0.8066353057687546, + "grad_norm": 2.6551607799250005, + "learning_rate": 2.846858937915496e-07, + "loss": 0.48, + "step": 19548 + }, + { + "epoch": 0.8066765701081126, + "grad_norm": 8.944526990325912, + "learning_rate": 2.845683975340888e-07, + "loss": 0.5548, + "step": 19549 + }, + { + "epoch": 0.8067178344474705, + "grad_norm": 6.222025131626708, + "learning_rate": 2.844509229872771e-07, + "loss": 0.4785, + "step": 19550 + }, + { + "epoch": 0.8067590987868284, + "grad_norm": 5.539270044687391, + "learning_rate": 2.843334701532125e-07, + "loss": 0.5036, + "step": 19551 + }, + { + "epoch": 0.8068003631261863, + "grad_norm": 6.112931046873531, + "learning_rate": 2.842160390339938e-07, + "loss": 0.5058, + "step": 19552 + }, + { + "epoch": 0.8068416274655443, + "grad_norm": 3.7697549058115554, + "learning_rate": 2.8409862963171766e-07, + "loss": 0.5686, + "step": 19553 + }, + { + "epoch": 0.8068828918049022, + "grad_norm": 3.142469941719169, + "learning_rate": 2.839812419484818e-07, + "loss": 0.4776, + "step": 19554 + }, + { + "epoch": 0.8069241561442602, + "grad_norm": 2.499690871293008, + "learning_rate": 2.838638759863828e-07, + "loss": 0.4765, + "step": 19555 + }, + { + "epoch": 0.8069654204836181, + "grad_norm": 2.4098350693324977, + "learning_rate": 2.837465317475177e-07, + "loss": 0.5, + "step": 19556 + }, + { + "epoch": 0.807006684822976, + "grad_norm": 2.7263631645042397, + "learning_rate": 2.836292092339818e-07, + "loss": 0.4961, + "step": 19557 + }, + { + "epoch": 0.8070479491623339, + "grad_norm": 3.2824301478717537, + "learning_rate": 2.8351190844787123e-07, + "loss": 0.5015, + "step": 19558 + }, + { + "epoch": 0.8070892135016918, + "grad_norm": 2.5696886634927503, + "learning_rate": 2.83394629391281e-07, + "loss": 0.4955, + "step": 19559 + }, + { + "epoch": 0.8071304778410497, + "grad_norm": 13.83643070537722, + "learning_rate": 
2.832773720663065e-07, + "loss": 0.5172, + "step": 19560 + }, + { + "epoch": 0.8071717421804077, + "grad_norm": 3.353291916738073, + "learning_rate": 2.8316013647504214e-07, + "loss": 0.4656, + "step": 19561 + }, + { + "epoch": 0.8072130065197656, + "grad_norm": 2.2829580659142925, + "learning_rate": 2.8304292261958124e-07, + "loss": 0.5159, + "step": 19562 + }, + { + "epoch": 0.8072542708591235, + "grad_norm": 2.693941071285574, + "learning_rate": 2.829257305020182e-07, + "loss": 0.4881, + "step": 19563 + }, + { + "epoch": 0.8072955351984815, + "grad_norm": 4.0462380802073294, + "learning_rate": 2.828085601244466e-07, + "loss": 0.5206, + "step": 19564 + }, + { + "epoch": 0.8073367995378394, + "grad_norm": 3.865351088735158, + "learning_rate": 2.8269141148895864e-07, + "loss": 0.503, + "step": 19565 + }, + { + "epoch": 0.8073780638771973, + "grad_norm": 6.437705227101034, + "learning_rate": 2.8257428459764733e-07, + "loss": 0.5445, + "step": 19566 + }, + { + "epoch": 0.8074193282165553, + "grad_norm": 3.4416817202508554, + "learning_rate": 2.8245717945260495e-07, + "loss": 0.5264, + "step": 19567 + }, + { + "epoch": 0.8074605925559132, + "grad_norm": 6.391344739966585, + "learning_rate": 2.823400960559234e-07, + "loss": 0.5075, + "step": 19568 + }, + { + "epoch": 0.8075018568952711, + "grad_norm": 3.5519231736029324, + "learning_rate": 2.822230344096936e-07, + "loss": 0.4641, + "step": 19569 + }, + { + "epoch": 0.807543121234629, + "grad_norm": 3.162818983350616, + "learning_rate": 2.8210599451600665e-07, + "loss": 0.5801, + "step": 19570 + }, + { + "epoch": 0.8075843855739869, + "grad_norm": 2.3453545852213806, + "learning_rate": 2.8198897637695353e-07, + "loss": 0.5392, + "step": 19571 + }, + { + "epoch": 0.8076256499133448, + "grad_norm": 5.879703882769309, + "learning_rate": 2.818719799946245e-07, + "loss": 0.5178, + "step": 19572 + }, + { + "epoch": 0.8076669142527028, + "grad_norm": 2.7979315617478346, + "learning_rate": 2.817550053711089e-07, + "loss": 0.4756, + "step": 19573 + }, + { + "epoch": 0.8077081785920608, + "grad_norm": 3.518500330858686, + "learning_rate": 2.81638052508497e-07, + "loss": 0.53, + "step": 19574 + }, + { + "epoch": 0.8077494429314187, + "grad_norm": 3.337173834430024, + "learning_rate": 2.815211214088767e-07, + "loss": 0.5655, + "step": 19575 + }, + { + "epoch": 0.8077907072707766, + "grad_norm": 12.576235109900509, + "learning_rate": 2.8140421207433746e-07, + "loss": 0.5052, + "step": 19576 + }, + { + "epoch": 0.8078319716101345, + "grad_norm": 3.665887745294013, + "learning_rate": 2.81287324506968e-07, + "loss": 0.517, + "step": 19577 + }, + { + "epoch": 0.8078732359494925, + "grad_norm": 12.836151747212133, + "learning_rate": 2.811704587088551e-07, + "loss": 0.4724, + "step": 19578 + }, + { + "epoch": 0.8079145002888504, + "grad_norm": 2.8496511290170172, + "learning_rate": 2.8105361468208686e-07, + "loss": 0.4815, + "step": 19579 + }, + { + "epoch": 0.8079557646282083, + "grad_norm": 3.842442409726663, + "learning_rate": 2.8093679242875086e-07, + "loss": 0.5785, + "step": 19580 + }, + { + "epoch": 0.8079970289675662, + "grad_norm": 3.3042501069582055, + "learning_rate": 2.8081999195093294e-07, + "loss": 0.4797, + "step": 19581 + }, + { + "epoch": 0.8080382933069241, + "grad_norm": 2.6845029664542057, + "learning_rate": 2.8070321325071993e-07, + "loss": 0.5271, + "step": 19582 + }, + { + "epoch": 0.808079557646282, + "grad_norm": 3.176565993347497, + "learning_rate": 2.8058645633019775e-07, + "loss": 0.4649, + "step": 19583 + }, + { + "epoch": 
0.8081208219856401, + "grad_norm": 4.269868270410809, + "learning_rate": 2.8046972119145233e-07, + "loss": 0.4878, + "step": 19584 + }, + { + "epoch": 0.808162086324998, + "grad_norm": 5.544803746205264, + "learning_rate": 2.8035300783656806e-07, + "loss": 0.5044, + "step": 19585 + }, + { + "epoch": 0.8082033506643559, + "grad_norm": 3.2568820335773174, + "learning_rate": 2.8023631626763007e-07, + "loss": 0.5502, + "step": 19586 + }, + { + "epoch": 0.8082446150037138, + "grad_norm": 12.801814252549816, + "learning_rate": 2.8011964648672343e-07, + "loss": 0.4949, + "step": 19587 + }, + { + "epoch": 0.8082858793430717, + "grad_norm": 2.6386315617446097, + "learning_rate": 2.80002998495931e-07, + "loss": 0.4673, + "step": 19588 + }, + { + "epoch": 0.8083271436824296, + "grad_norm": 2.636681962214087, + "learning_rate": 2.7988637229733765e-07, + "loss": 0.5272, + "step": 19589 + }, + { + "epoch": 0.8083684080217876, + "grad_norm": 12.959887671293945, + "learning_rate": 2.797697678930253e-07, + "loss": 0.5353, + "step": 19590 + }, + { + "epoch": 0.8084096723611455, + "grad_norm": 3.607370958662003, + "learning_rate": 2.7965318528507757e-07, + "loss": 0.4885, + "step": 19591 + }, + { + "epoch": 0.8084509367005034, + "grad_norm": 2.506355423367705, + "learning_rate": 2.795366244755768e-07, + "loss": 0.5003, + "step": 19592 + }, + { + "epoch": 0.8084922010398613, + "grad_norm": 5.121573119417019, + "learning_rate": 2.794200854666053e-07, + "loss": 0.4812, + "step": 19593 + }, + { + "epoch": 0.8085334653792193, + "grad_norm": 5.745493609845672, + "learning_rate": 2.7930356826024435e-07, + "loss": 0.504, + "step": 19594 + }, + { + "epoch": 0.8085747297185772, + "grad_norm": 7.135226929996224, + "learning_rate": 2.7918707285857506e-07, + "loss": 0.4796, + "step": 19595 + }, + { + "epoch": 0.8086159940579352, + "grad_norm": 3.3477362464982163, + "learning_rate": 2.790705992636789e-07, + "loss": 0.5285, + "step": 19596 + }, + { + "epoch": 0.8086572583972931, + "grad_norm": 6.556744098110639, + "learning_rate": 2.7895414747763643e-07, + "loss": 0.5048, + "step": 19597 + }, + { + "epoch": 0.808698522736651, + "grad_norm": 21.433723073496452, + "learning_rate": 2.788377175025272e-07, + "loss": 0.506, + "step": 19598 + }, + { + "epoch": 0.8087397870760089, + "grad_norm": 8.408415030937896, + "learning_rate": 2.7872130934043115e-07, + "loss": 0.5368, + "step": 19599 + }, + { + "epoch": 0.8087810514153668, + "grad_norm": 2.82857729885102, + "learning_rate": 2.7860492299342807e-07, + "loss": 0.5659, + "step": 19600 + }, + { + "epoch": 0.8088223157547247, + "grad_norm": 10.010399342060454, + "learning_rate": 2.784885584635965e-07, + "loss": 0.5073, + "step": 19601 + }, + { + "epoch": 0.8088635800940827, + "grad_norm": 7.720477048506321, + "learning_rate": 2.783722157530145e-07, + "loss": 0.5424, + "step": 19602 + }, + { + "epoch": 0.8089048444334406, + "grad_norm": 2.4177442212909765, + "learning_rate": 2.78255894863761e-07, + "loss": 0.4726, + "step": 19603 + }, + { + "epoch": 0.8089461087727986, + "grad_norm": 3.577026486032643, + "learning_rate": 2.781395957979135e-07, + "loss": 0.5209, + "step": 19604 + }, + { + "epoch": 0.8089873731121565, + "grad_norm": 18.77653891881047, + "learning_rate": 2.7802331855754977e-07, + "loss": 0.4649, + "step": 19605 + }, + { + "epoch": 0.8090286374515144, + "grad_norm": 3.382875597887012, + "learning_rate": 2.7790706314474614e-07, + "loss": 0.5291, + "step": 19606 + }, + { + "epoch": 0.8090699017908723, + "grad_norm": 2.6767178564627523, + "learning_rate": 
2.777908295615796e-07, + "loss": 0.5573, + "step": 19607 + }, + { + "epoch": 0.8091111661302303, + "grad_norm": 6.814252819274268, + "learning_rate": 2.7767461781012634e-07, + "loss": 0.4366, + "step": 19608 + }, + { + "epoch": 0.8091524304695882, + "grad_norm": 2.1241150528970234, + "learning_rate": 2.775584278924625e-07, + "loss": 0.4661, + "step": 19609 + }, + { + "epoch": 0.8091936948089461, + "grad_norm": 3.84642928102899, + "learning_rate": 2.7744225981066296e-07, + "loss": 0.4886, + "step": 19610 + }, + { + "epoch": 0.809234959148304, + "grad_norm": 3.209013687071909, + "learning_rate": 2.773261135668029e-07, + "loss": 0.4986, + "step": 19611 + }, + { + "epoch": 0.8092762234876619, + "grad_norm": 9.780628052688472, + "learning_rate": 2.7720998916295727e-07, + "loss": 0.5141, + "step": 19612 + }, + { + "epoch": 0.8093174878270198, + "grad_norm": 2.1659422948262725, + "learning_rate": 2.7709388660120044e-07, + "loss": 0.5467, + "step": 19613 + }, + { + "epoch": 0.8093587521663779, + "grad_norm": 3.1506140904170175, + "learning_rate": 2.769778058836061e-07, + "loss": 0.4344, + "step": 19614 + }, + { + "epoch": 0.8094000165057358, + "grad_norm": 2.819608190045785, + "learning_rate": 2.7686174701224724e-07, + "loss": 0.5131, + "step": 19615 + }, + { + "epoch": 0.8094412808450937, + "grad_norm": 2.741618067863514, + "learning_rate": 2.767457099891974e-07, + "loss": 0.4997, + "step": 19616 + }, + { + "epoch": 0.8094825451844516, + "grad_norm": 2.4867594657796928, + "learning_rate": 2.766296948165294e-07, + "loss": 0.5604, + "step": 19617 + }, + { + "epoch": 0.8095238095238095, + "grad_norm": 3.519034920986633, + "learning_rate": 2.7651370149631574e-07, + "loss": 0.5151, + "step": 19618 + }, + { + "epoch": 0.8095650738631675, + "grad_norm": 2.586081468852777, + "learning_rate": 2.7639773003062774e-07, + "loss": 0.53, + "step": 19619 + }, + { + "epoch": 0.8096063382025254, + "grad_norm": 2.4715098859662086, + "learning_rate": 2.762817804215371e-07, + "loss": 0.545, + "step": 19620 + }, + { + "epoch": 0.8096476025418833, + "grad_norm": 2.611967024332654, + "learning_rate": 2.761658526711157e-07, + "loss": 0.5531, + "step": 19621 + }, + { + "epoch": 0.8096888668812412, + "grad_norm": 6.685048468383685, + "learning_rate": 2.7604994678143337e-07, + "loss": 0.4709, + "step": 19622 + }, + { + "epoch": 0.8097301312205991, + "grad_norm": 3.711300985593948, + "learning_rate": 2.7593406275456075e-07, + "loss": 0.4838, + "step": 19623 + }, + { + "epoch": 0.809771395559957, + "grad_norm": 3.968766636204344, + "learning_rate": 2.7581820059256786e-07, + "loss": 0.5182, + "step": 19624 + }, + { + "epoch": 0.8098126598993151, + "grad_norm": 2.7728479753566826, + "learning_rate": 2.757023602975249e-07, + "loss": 0.5113, + "step": 19625 + }, + { + "epoch": 0.809853924238673, + "grad_norm": 4.247451371812957, + "learning_rate": 2.755865418715e-07, + "loss": 0.5234, + "step": 19626 + }, + { + "epoch": 0.8098951885780309, + "grad_norm": 3.2160715289784654, + "learning_rate": 2.754707453165629e-07, + "loss": 0.4883, + "step": 19627 + }, + { + "epoch": 0.8099364529173888, + "grad_norm": 4.250299714025283, + "learning_rate": 2.753549706347811e-07, + "loss": 0.5478, + "step": 19628 + }, + { + "epoch": 0.8099777172567467, + "grad_norm": 2.5321916928895343, + "learning_rate": 2.7523921782822324e-07, + "loss": 0.528, + "step": 19629 + }, + { + "epoch": 0.8100189815961046, + "grad_norm": 3.6377893482023396, + "learning_rate": 2.751234868989572e-07, + "loss": 0.5362, + "step": 19630 + }, + { + "epoch": 
0.8100602459354626, + "grad_norm": 2.43877806908251, + "learning_rate": 2.7500777784904953e-07, + "loss": 0.4791, + "step": 19631 + }, + { + "epoch": 0.8101015102748205, + "grad_norm": 4.316139146254759, + "learning_rate": 2.748920906805673e-07, + "loss": 0.6195, + "step": 19632 + }, + { + "epoch": 0.8101427746141784, + "grad_norm": 3.6102588338057813, + "learning_rate": 2.747764253955771e-07, + "loss": 0.4999, + "step": 19633 + }, + { + "epoch": 0.8101840389535363, + "grad_norm": 3.3077826505829204, + "learning_rate": 2.746607819961453e-07, + "loss": 0.5506, + "step": 19634 + }, + { + "epoch": 0.8102253032928943, + "grad_norm": 2.5583312165613554, + "learning_rate": 2.7454516048433704e-07, + "loss": 0.5407, + "step": 19635 + }, + { + "epoch": 0.8102665676322522, + "grad_norm": 8.471979088658012, + "learning_rate": 2.744295608622178e-07, + "loss": 0.4488, + "step": 19636 + }, + { + "epoch": 0.8103078319716102, + "grad_norm": 3.135088019081621, + "learning_rate": 2.7431398313185235e-07, + "loss": 0.5177, + "step": 19637 + }, + { + "epoch": 0.8103490963109681, + "grad_norm": 3.5986359300500315, + "learning_rate": 2.74198427295306e-07, + "loss": 0.5932, + "step": 19638 + }, + { + "epoch": 0.810390360650326, + "grad_norm": 2.0417828970894503, + "learning_rate": 2.740828933546415e-07, + "loss": 0.5436, + "step": 19639 + }, + { + "epoch": 0.8104316249896839, + "grad_norm": 4.385620637884058, + "learning_rate": 2.7396738131192405e-07, + "loss": 0.5483, + "step": 19640 + }, + { + "epoch": 0.8104728893290418, + "grad_norm": 2.763204834520765, + "learning_rate": 2.738518911692157e-07, + "loss": 0.5373, + "step": 19641 + }, + { + "epoch": 0.8105141536683997, + "grad_norm": 3.3884886277240116, + "learning_rate": 2.737364229285802e-07, + "loss": 0.5447, + "step": 19642 + }, + { + "epoch": 0.8105554180077577, + "grad_norm": 3.4613650635649353, + "learning_rate": 2.736209765920795e-07, + "loss": 0.5367, + "step": 19643 + }, + { + "epoch": 0.8105966823471156, + "grad_norm": 3.351134838045002, + "learning_rate": 2.735055521617762e-07, + "loss": 0.4716, + "step": 19644 + }, + { + "epoch": 0.8106379466864736, + "grad_norm": 2.317689044690201, + "learning_rate": 2.733901496397319e-07, + "loss": 0.5068, + "step": 19645 + }, + { + "epoch": 0.8106792110258315, + "grad_norm": 7.927870954778361, + "learning_rate": 2.7327476902800836e-07, + "loss": 0.4948, + "step": 19646 + }, + { + "epoch": 0.8107204753651894, + "grad_norm": 3.9153784958039792, + "learning_rate": 2.7315941032866585e-07, + "loss": 0.4921, + "step": 19647 + }, + { + "epoch": 0.8107617397045473, + "grad_norm": 3.281542350612777, + "learning_rate": 2.7304407354376547e-07, + "loss": 0.5683, + "step": 19648 + }, + { + "epoch": 0.8108030040439053, + "grad_norm": 2.1501002978021178, + "learning_rate": 2.729287586753672e-07, + "loss": 0.5204, + "step": 19649 + }, + { + "epoch": 0.8108442683832632, + "grad_norm": 2.8774192312358875, + "learning_rate": 2.728134657255314e-07, + "loss": 0.547, + "step": 19650 + }, + { + "epoch": 0.8108855327226211, + "grad_norm": 4.133480981103742, + "learning_rate": 2.7269819469631674e-07, + "loss": 0.5251, + "step": 19651 + }, + { + "epoch": 0.810926797061979, + "grad_norm": 2.9104774142060617, + "learning_rate": 2.7258294558978244e-07, + "loss": 0.5274, + "step": 19652 + }, + { + "epoch": 0.8109680614013369, + "grad_norm": 2.958036632664014, + "learning_rate": 2.7246771840798764e-07, + "loss": 0.4835, + "step": 19653 + }, + { + "epoch": 0.8110093257406948, + "grad_norm": 2.756920120746732, + "learning_rate": 
2.7235251315298986e-07, + "loss": 0.5313, + "step": 19654 + }, + { + "epoch": 0.8110505900800529, + "grad_norm": 5.729697365210783, + "learning_rate": 2.722373298268477e-07, + "loss": 0.4794, + "step": 19655 + }, + { + "epoch": 0.8110918544194108, + "grad_norm": 4.0729863157137896, + "learning_rate": 2.7212216843161793e-07, + "loss": 0.5075, + "step": 19656 + }, + { + "epoch": 0.8111331187587687, + "grad_norm": 2.120909022597581, + "learning_rate": 2.7200702896935777e-07, + "loss": 0.4886, + "step": 19657 + }, + { + "epoch": 0.8111743830981266, + "grad_norm": 4.997689870260202, + "learning_rate": 2.718919114421244e-07, + "loss": 0.4482, + "step": 19658 + }, + { + "epoch": 0.8112156474374845, + "grad_norm": 4.40948335351582, + "learning_rate": 2.717768158519735e-07, + "loss": 0.5241, + "step": 19659 + }, + { + "epoch": 0.8112569117768424, + "grad_norm": 2.5470163385262956, + "learning_rate": 2.716617422009611e-07, + "loss": 0.5317, + "step": 19660 + }, + { + "epoch": 0.8112981761162004, + "grad_norm": 5.088008241422544, + "learning_rate": 2.7154669049114277e-07, + "loss": 0.5603, + "step": 19661 + }, + { + "epoch": 0.8113394404555583, + "grad_norm": 3.2461412026931074, + "learning_rate": 2.7143166072457414e-07, + "loss": 0.5151, + "step": 19662 + }, + { + "epoch": 0.8113807047949162, + "grad_norm": 3.1381411817332405, + "learning_rate": 2.713166529033091e-07, + "loss": 0.4956, + "step": 19663 + }, + { + "epoch": 0.8114219691342741, + "grad_norm": 2.6740730902082186, + "learning_rate": 2.712016670294023e-07, + "loss": 0.4904, + "step": 19664 + }, + { + "epoch": 0.8114632334736321, + "grad_norm": 3.0720458496750536, + "learning_rate": 2.710867031049078e-07, + "loss": 0.4756, + "step": 19665 + }, + { + "epoch": 0.81150449781299, + "grad_norm": 3.5163850901390137, + "learning_rate": 2.7097176113187925e-07, + "loss": 0.5299, + "step": 19666 + }, + { + "epoch": 0.811545762152348, + "grad_norm": 1.9769805892417, + "learning_rate": 2.7085684111236973e-07, + "loss": 0.4942, + "step": 19667 + }, + { + "epoch": 0.8115870264917059, + "grad_norm": 2.397209127704873, + "learning_rate": 2.707419430484314e-07, + "loss": 0.4919, + "step": 19668 + }, + { + "epoch": 0.8116282908310638, + "grad_norm": 3.6838123915664296, + "learning_rate": 2.706270669421173e-07, + "loss": 0.4933, + "step": 19669 + }, + { + "epoch": 0.8116695551704217, + "grad_norm": 3.1167112634052376, + "learning_rate": 2.7051221279547896e-07, + "loss": 0.5112, + "step": 19670 + }, + { + "epoch": 0.8117108195097796, + "grad_norm": 2.455830520905721, + "learning_rate": 2.703973806105687e-07, + "loss": 0.5733, + "step": 19671 + }, + { + "epoch": 0.8117520838491376, + "grad_norm": 4.466053158584401, + "learning_rate": 2.7028257038943686e-07, + "loss": 0.5436, + "step": 19672 + }, + { + "epoch": 0.8117933481884955, + "grad_norm": 3.6124986574900273, + "learning_rate": 2.7016778213413454e-07, + "loss": 0.4696, + "step": 19673 + }, + { + "epoch": 0.8118346125278534, + "grad_norm": 3.2470502300898243, + "learning_rate": 2.700530158467123e-07, + "loss": 0.5487, + "step": 19674 + }, + { + "epoch": 0.8118758768672114, + "grad_norm": 6.369910716072584, + "learning_rate": 2.6993827152922033e-07, + "loss": 0.5316, + "step": 19675 + }, + { + "epoch": 0.8119171412065693, + "grad_norm": 2.7126470164371703, + "learning_rate": 2.6982354918370766e-07, + "loss": 0.5375, + "step": 19676 + }, + { + "epoch": 0.8119584055459272, + "grad_norm": 6.024463763534529, + "learning_rate": 2.6970884881222376e-07, + "loss": 0.5318, + "step": 19677 + }, + { + "epoch": 
0.8119996698852852, + "grad_norm": 2.9867832281978797, + "learning_rate": 2.695941704168176e-07, + "loss": 0.4975, + "step": 19678 + }, + { + "epoch": 0.8120409342246431, + "grad_norm": 2.163226568909251, + "learning_rate": 2.694795139995383e-07, + "loss": 0.4245, + "step": 19679 + }, + { + "epoch": 0.812082198564001, + "grad_norm": 6.220381421863802, + "learning_rate": 2.6936487956243235e-07, + "loss": 0.5324, + "step": 19680 + }, + { + "epoch": 0.8121234629033589, + "grad_norm": 5.929068491938727, + "learning_rate": 2.692502671075482e-07, + "loss": 0.5127, + "step": 19681 + }, + { + "epoch": 0.8121647272427168, + "grad_norm": 8.889629642460154, + "learning_rate": 2.691356766369332e-07, + "loss": 0.5725, + "step": 19682 + }, + { + "epoch": 0.8122059915820747, + "grad_norm": 2.6197842171577856, + "learning_rate": 2.6902110815263465e-07, + "loss": 0.5398, + "step": 19683 + }, + { + "epoch": 0.8122472559214327, + "grad_norm": 4.332019792490748, + "learning_rate": 2.6890656165669804e-07, + "loss": 0.5443, + "step": 19684 + }, + { + "epoch": 0.8122885202607906, + "grad_norm": 5.397720988369873, + "learning_rate": 2.6879203715117007e-07, + "loss": 0.5087, + "step": 19685 + }, + { + "epoch": 0.8123297846001486, + "grad_norm": 5.8514993914442535, + "learning_rate": 2.686775346380963e-07, + "loss": 0.5458, + "step": 19686 + }, + { + "epoch": 0.8123710489395065, + "grad_norm": 1.9764450870639725, + "learning_rate": 2.685630541195224e-07, + "loss": 0.5088, + "step": 19687 + }, + { + "epoch": 0.8124123132788644, + "grad_norm": 3.209736217951251, + "learning_rate": 2.6844859559749247e-07, + "loss": 0.5667, + "step": 19688 + }, + { + "epoch": 0.8124535776182223, + "grad_norm": 3.6579914905139996, + "learning_rate": 2.683341590740516e-07, + "loss": 0.5459, + "step": 19689 + }, + { + "epoch": 0.8124948419575803, + "grad_norm": 6.208159151308496, + "learning_rate": 2.682197445512438e-07, + "loss": 0.5281, + "step": 19690 + }, + { + "epoch": 0.8125361062969382, + "grad_norm": 2.225222562902665, + "learning_rate": 2.681053520311132e-07, + "loss": 0.5658, + "step": 19691 + }, + { + "epoch": 0.8125773706362961, + "grad_norm": 4.024782578826452, + "learning_rate": 2.679909815157028e-07, + "loss": 0.4646, + "step": 19692 + }, + { + "epoch": 0.812618634975654, + "grad_norm": 2.38177948377339, + "learning_rate": 2.678766330070549e-07, + "loss": 0.4882, + "step": 19693 + }, + { + "epoch": 0.8126598993150119, + "grad_norm": 2.5767280359750853, + "learning_rate": 2.6776230650721276e-07, + "loss": 0.5103, + "step": 19694 + }, + { + "epoch": 0.8127011636543698, + "grad_norm": 6.687953134075424, + "learning_rate": 2.676480020182185e-07, + "loss": 0.528, + "step": 19695 + }, + { + "epoch": 0.8127424279937279, + "grad_norm": 2.972862574635812, + "learning_rate": 2.67533719542114e-07, + "loss": 0.5253, + "step": 19696 + }, + { + "epoch": 0.8127836923330858, + "grad_norm": 2.88873464826331, + "learning_rate": 2.6741945908094013e-07, + "loss": 0.5222, + "step": 19697 + }, + { + "epoch": 0.8128249566724437, + "grad_norm": 1.866890322831006, + "learning_rate": 2.673052206367381e-07, + "loss": 0.4802, + "step": 19698 + }, + { + "epoch": 0.8128662210118016, + "grad_norm": 3.989874913036786, + "learning_rate": 2.6719100421154906e-07, + "loss": 0.4997, + "step": 19699 + }, + { + "epoch": 0.8129074853511595, + "grad_norm": 6.296090548429626, + "learning_rate": 2.6707680980741234e-07, + "loss": 0.4976, + "step": 19700 + }, + { + "epoch": 0.8129487496905174, + "grad_norm": 2.1940144041850194, + "learning_rate": 
2.6696263742636794e-07, + "loss": 0.489, + "step": 19701 + }, + { + "epoch": 0.8129900140298754, + "grad_norm": 2.9379780640771163, + "learning_rate": 2.668484870704556e-07, + "loss": 0.5051, + "step": 19702 + }, + { + "epoch": 0.8130312783692333, + "grad_norm": 2.8261612858964273, + "learning_rate": 2.667343587417144e-07, + "loss": 0.4631, + "step": 19703 + }, + { + "epoch": 0.8130725427085912, + "grad_norm": 2.3259512046369193, + "learning_rate": 2.6662025244218243e-07, + "loss": 0.5048, + "step": 19704 + }, + { + "epoch": 0.8131138070479491, + "grad_norm": 3.521418997954656, + "learning_rate": 2.6650616817389813e-07, + "loss": 0.5446, + "step": 19705 + }, + { + "epoch": 0.8131550713873071, + "grad_norm": 2.1396985811567237, + "learning_rate": 2.6639210593889966e-07, + "loss": 0.5441, + "step": 19706 + }, + { + "epoch": 0.813196335726665, + "grad_norm": 2.9243260219992426, + "learning_rate": 2.6627806573922396e-07, + "loss": 0.5092, + "step": 19707 + }, + { + "epoch": 0.813237600066023, + "grad_norm": 2.7477940570518884, + "learning_rate": 2.661640475769087e-07, + "loss": 0.5556, + "step": 19708 + }, + { + "epoch": 0.8132788644053809, + "grad_norm": 4.695441677271069, + "learning_rate": 2.6605005145398976e-07, + "loss": 0.4931, + "step": 19709 + }, + { + "epoch": 0.8133201287447388, + "grad_norm": 6.0936066521047, + "learning_rate": 2.659360773725036e-07, + "loss": 0.5039, + "step": 19710 + }, + { + "epoch": 0.8133613930840967, + "grad_norm": 2.4925184726812573, + "learning_rate": 2.658221253344864e-07, + "loss": 0.4757, + "step": 19711 + }, + { + "epoch": 0.8134026574234546, + "grad_norm": 2.2326175147887684, + "learning_rate": 2.657081953419739e-07, + "loss": 0.4807, + "step": 19712 + }, + { + "epoch": 0.8134439217628125, + "grad_norm": 2.861006928551175, + "learning_rate": 2.6559428739700024e-07, + "loss": 0.5372, + "step": 19713 + }, + { + "epoch": 0.8134851861021705, + "grad_norm": 5.250228520483441, + "learning_rate": 2.654804015016007e-07, + "loss": 0.4591, + "step": 19714 + }, + { + "epoch": 0.8135264504415284, + "grad_norm": 3.3081437690588475, + "learning_rate": 2.653665376578094e-07, + "loss": 0.5168, + "step": 19715 + }, + { + "epoch": 0.8135677147808864, + "grad_norm": 3.240936771747087, + "learning_rate": 2.6525269586766085e-07, + "loss": 0.4918, + "step": 19716 + }, + { + "epoch": 0.8136089791202443, + "grad_norm": 2.6199553393230297, + "learning_rate": 2.651388761331876e-07, + "loss": 0.4935, + "step": 19717 + }, + { + "epoch": 0.8136502434596022, + "grad_norm": 3.3292017507867753, + "learning_rate": 2.6502507845642315e-07, + "loss": 0.5012, + "step": 19718 + }, + { + "epoch": 0.8136915077989602, + "grad_norm": 2.7498488522786935, + "learning_rate": 2.6491130283940056e-07, + "loss": 0.5286, + "step": 19719 + }, + { + "epoch": 0.8137327721383181, + "grad_norm": 3.498057538713874, + "learning_rate": 2.647975492841519e-07, + "loss": 0.5112, + "step": 19720 + }, + { + "epoch": 0.813774036477676, + "grad_norm": 2.74185005662897, + "learning_rate": 2.6468381779270853e-07, + "loss": 0.5261, + "step": 19721 + }, + { + "epoch": 0.8138153008170339, + "grad_norm": 2.0553698693004865, + "learning_rate": 2.645701083671026e-07, + "loss": 0.4823, + "step": 19722 + }, + { + "epoch": 0.8138565651563918, + "grad_norm": 9.857781979622603, + "learning_rate": 2.644564210093649e-07, + "loss": 0.5213, + "step": 19723 + }, + { + "epoch": 0.8138978294957497, + "grad_norm": 1.844704733104912, + "learning_rate": 2.6434275572152695e-07, + "loss": 0.5186, + "step": 19724 + }, + { + "epoch": 
0.8139390938351077, + "grad_norm": 2.8459141638008654, + "learning_rate": 2.642291125056181e-07, + "loss": 0.519, + "step": 19725 + }, + { + "epoch": 0.8139803581744657, + "grad_norm": 3.358279856241268, + "learning_rate": 2.6411549136366874e-07, + "loss": 0.5316, + "step": 19726 + }, + { + "epoch": 0.8140216225138236, + "grad_norm": 4.10666101005902, + "learning_rate": 2.640018922977083e-07, + "loss": 0.5628, + "step": 19727 + }, + { + "epoch": 0.8140628868531815, + "grad_norm": 9.621763004031239, + "learning_rate": 2.6388831530976656e-07, + "loss": 0.5145, + "step": 19728 + }, + { + "epoch": 0.8141041511925394, + "grad_norm": 2.803391493885897, + "learning_rate": 2.637747604018713e-07, + "loss": 0.5166, + "step": 19729 + }, + { + "epoch": 0.8141454155318973, + "grad_norm": 4.4414386060582105, + "learning_rate": 2.636612275760515e-07, + "loss": 0.5018, + "step": 19730 + }, + { + "epoch": 0.8141866798712553, + "grad_norm": 3.838382302075721, + "learning_rate": 2.6354771683433494e-07, + "loss": 0.5022, + "step": 19731 + }, + { + "epoch": 0.8142279442106132, + "grad_norm": 2.4727138762731626, + "learning_rate": 2.6343422817874964e-07, + "loss": 0.4864, + "step": 19732 + }, + { + "epoch": 0.8142692085499711, + "grad_norm": 4.569290195893894, + "learning_rate": 2.633207616113225e-07, + "loss": 0.5583, + "step": 19733 + }, + { + "epoch": 0.814310472889329, + "grad_norm": 2.2725862522101226, + "learning_rate": 2.6320731713407996e-07, + "loss": 0.5235, + "step": 19734 + }, + { + "epoch": 0.8143517372286869, + "grad_norm": 3.407891991359105, + "learning_rate": 2.6309389474904855e-07, + "loss": 0.4559, + "step": 19735 + }, + { + "epoch": 0.814393001568045, + "grad_norm": 2.628970280547602, + "learning_rate": 2.629804944582549e-07, + "loss": 0.5383, + "step": 19736 + }, + { + "epoch": 0.8144342659074029, + "grad_norm": 2.8837354100713584, + "learning_rate": 2.6286711626372375e-07, + "loss": 0.4782, + "step": 19737 + }, + { + "epoch": 0.8144755302467608, + "grad_norm": 3.795463945169142, + "learning_rate": 2.627537601674808e-07, + "loss": 0.4784, + "step": 19738 + }, + { + "epoch": 0.8145167945861187, + "grad_norm": 11.100281668052236, + "learning_rate": 2.6264042617155073e-07, + "loss": 0.5132, + "step": 19739 + }, + { + "epoch": 0.8145580589254766, + "grad_norm": 3.5618247690193603, + "learning_rate": 2.625271142779586e-07, + "loss": 0.5538, + "step": 19740 + }, + { + "epoch": 0.8145993232648345, + "grad_norm": 2.826647253970329, + "learning_rate": 2.6241382448872735e-07, + "loss": 0.5906, + "step": 19741 + }, + { + "epoch": 0.8146405876041924, + "grad_norm": 2.7011313257654246, + "learning_rate": 2.6230055680588114e-07, + "loss": 0.5415, + "step": 19742 + }, + { + "epoch": 0.8146818519435504, + "grad_norm": 4.585227544937488, + "learning_rate": 2.621873112314434e-07, + "loss": 0.484, + "step": 19743 + }, + { + "epoch": 0.8147231162829083, + "grad_norm": 16.300973134397402, + "learning_rate": 2.6207408776743696e-07, + "loss": 0.5386, + "step": 19744 + }, + { + "epoch": 0.8147643806222662, + "grad_norm": 2.4380646777691184, + "learning_rate": 2.6196088641588437e-07, + "loss": 0.4774, + "step": 19745 + }, + { + "epoch": 0.8148056449616241, + "grad_norm": 3.3635244699062907, + "learning_rate": 2.618477071788069e-07, + "loss": 0.5165, + "step": 19746 + }, + { + "epoch": 0.8148469093009821, + "grad_norm": 3.2718920445496, + "learning_rate": 2.617345500582269e-07, + "loss": 0.5181, + "step": 19747 + }, + { + "epoch": 0.81488817364034, + "grad_norm": 4.796697615200018, + "learning_rate": 
2.6162141505616536e-07, + "loss": 0.4336, + "step": 19748 + }, + { + "epoch": 0.814929437979698, + "grad_norm": 15.651353956094049, + "learning_rate": 2.615083021746438e-07, + "loss": 0.5099, + "step": 19749 + }, + { + "epoch": 0.8149707023190559, + "grad_norm": 6.175977168971703, + "learning_rate": 2.613952114156816e-07, + "loss": 0.504, + "step": 19750 + }, + { + "epoch": 0.8150119666584138, + "grad_norm": 5.142473361748522, + "learning_rate": 2.6128214278129965e-07, + "loss": 0.5089, + "step": 19751 + }, + { + "epoch": 0.8150532309977717, + "grad_norm": 3.711698313317253, + "learning_rate": 2.6116909627351737e-07, + "loss": 0.5115, + "step": 19752 + }, + { + "epoch": 0.8150944953371296, + "grad_norm": 8.964173975703662, + "learning_rate": 2.6105607189435453e-07, + "loss": 0.509, + "step": 19753 + }, + { + "epoch": 0.8151357596764875, + "grad_norm": 5.500855269894198, + "learning_rate": 2.609430696458292e-07, + "loss": 0.476, + "step": 19754 + }, + { + "epoch": 0.8151770240158455, + "grad_norm": 2.5045944235308006, + "learning_rate": 2.6083008952996024e-07, + "loss": 0.4589, + "step": 19755 + }, + { + "epoch": 0.8152182883552034, + "grad_norm": 2.604170327770852, + "learning_rate": 2.6071713154876585e-07, + "loss": 0.5382, + "step": 19756 + }, + { + "epoch": 0.8152595526945614, + "grad_norm": 6.7434607893544465, + "learning_rate": 2.6060419570426413e-07, + "loss": 0.5183, + "step": 19757 + }, + { + "epoch": 0.8153008170339193, + "grad_norm": 6.795098229295203, + "learning_rate": 2.6049128199847176e-07, + "loss": 0.4904, + "step": 19758 + }, + { + "epoch": 0.8153420813732772, + "grad_norm": 5.1924004533165125, + "learning_rate": 2.603783904334057e-07, + "loss": 0.5523, + "step": 19759 + }, + { + "epoch": 0.8153833457126352, + "grad_norm": 2.1483481326224485, + "learning_rate": 2.602655210110827e-07, + "loss": 0.5316, + "step": 19760 + }, + { + "epoch": 0.8154246100519931, + "grad_norm": 3.4236212753756616, + "learning_rate": 2.6015267373351906e-07, + "loss": 0.4711, + "step": 19761 + }, + { + "epoch": 0.815465874391351, + "grad_norm": 2.901939303603086, + "learning_rate": 2.600398486027299e-07, + "loss": 0.492, + "step": 19762 + }, + { + "epoch": 0.8155071387307089, + "grad_norm": 5.206341345306432, + "learning_rate": 2.59927045620731e-07, + "loss": 0.4923, + "step": 19763 + }, + { + "epoch": 0.8155484030700668, + "grad_norm": 3.2727388952588674, + "learning_rate": 2.598142647895373e-07, + "loss": 0.5098, + "step": 19764 + }, + { + "epoch": 0.8155896674094247, + "grad_norm": 2.3030729866315376, + "learning_rate": 2.597015061111637e-07, + "loss": 0.5497, + "step": 19765 + }, + { + "epoch": 0.8156309317487827, + "grad_norm": 3.026783455931736, + "learning_rate": 2.595887695876235e-07, + "loss": 0.5539, + "step": 19766 + }, + { + "epoch": 0.8156721960881407, + "grad_norm": 3.708385151425993, + "learning_rate": 2.594760552209309e-07, + "loss": 0.5365, + "step": 19767 + }, + { + "epoch": 0.8157134604274986, + "grad_norm": 2.762995849122203, + "learning_rate": 2.593633630130993e-07, + "loss": 0.4885, + "step": 19768 + }, + { + "epoch": 0.8157547247668565, + "grad_norm": 2.3266052996333175, + "learning_rate": 2.592506929661419e-07, + "loss": 0.5331, + "step": 19769 + }, + { + "epoch": 0.8157959891062144, + "grad_norm": 2.7744029443059492, + "learning_rate": 2.5913804508207085e-07, + "loss": 0.4819, + "step": 19770 + }, + { + "epoch": 0.8158372534455723, + "grad_norm": 3.3323100201914304, + "learning_rate": 2.590254193628987e-07, + "loss": 0.5984, + "step": 19771 + }, + { + "epoch": 
0.8158785177849303, + "grad_norm": 13.174492961904441, + "learning_rate": 2.589128158106367e-07, + "loss": 0.5768, + "step": 19772 + }, + { + "epoch": 0.8159197821242882, + "grad_norm": 4.1539351392036865, + "learning_rate": 2.588002344272964e-07, + "loss": 0.504, + "step": 19773 + }, + { + "epoch": 0.8159610464636461, + "grad_norm": 2.455609412138459, + "learning_rate": 2.586876752148893e-07, + "loss": 0.5291, + "step": 19774 + }, + { + "epoch": 0.816002310803004, + "grad_norm": 10.14521031975429, + "learning_rate": 2.585751381754254e-07, + "loss": 0.5367, + "step": 19775 + }, + { + "epoch": 0.8160435751423619, + "grad_norm": 2.2958012297332093, + "learning_rate": 2.584626233109151e-07, + "loss": 0.551, + "step": 19776 + }, + { + "epoch": 0.81608483948172, + "grad_norm": 3.4081627605319658, + "learning_rate": 2.583501306233685e-07, + "loss": 0.4801, + "step": 19777 + }, + { + "epoch": 0.8161261038210779, + "grad_norm": 5.077389494781085, + "learning_rate": 2.582376601147942e-07, + "loss": 0.5057, + "step": 19778 + }, + { + "epoch": 0.8161673681604358, + "grad_norm": 4.09775386422603, + "learning_rate": 2.581252117872018e-07, + "loss": 0.4921, + "step": 19779 + }, + { + "epoch": 0.8162086324997937, + "grad_norm": 4.6146626915196505, + "learning_rate": 2.580127856425999e-07, + "loss": 0.5301, + "step": 19780 + }, + { + "epoch": 0.8162498968391516, + "grad_norm": 2.3705524023821725, + "learning_rate": 2.57900381682997e-07, + "loss": 0.5936, + "step": 19781 + }, + { + "epoch": 0.8162911611785095, + "grad_norm": 6.2179408633400115, + "learning_rate": 2.577879999104001e-07, + "loss": 0.5242, + "step": 19782 + }, + { + "epoch": 0.8163324255178674, + "grad_norm": 11.487666598410511, + "learning_rate": 2.57675640326817e-07, + "loss": 0.4737, + "step": 19783 + }, + { + "epoch": 0.8163736898572254, + "grad_norm": 2.6407676626722476, + "learning_rate": 2.5756330293425526e-07, + "loss": 0.4954, + "step": 19784 + }, + { + "epoch": 0.8164149541965833, + "grad_norm": 2.3679289663995373, + "learning_rate": 2.5745098773472067e-07, + "loss": 0.5233, + "step": 19785 + }, + { + "epoch": 0.8164562185359412, + "grad_norm": 2.2038385312673636, + "learning_rate": 2.5733869473022014e-07, + "loss": 0.4662, + "step": 19786 + }, + { + "epoch": 0.8164974828752992, + "grad_norm": 6.8210474864673545, + "learning_rate": 2.572264239227587e-07, + "loss": 0.4702, + "step": 19787 + }, + { + "epoch": 0.8165387472146571, + "grad_norm": 4.833941811431108, + "learning_rate": 2.571141753143425e-07, + "loss": 0.4974, + "step": 19788 + }, + { + "epoch": 0.816580011554015, + "grad_norm": 2.521094275479585, + "learning_rate": 2.5700194890697616e-07, + "loss": 0.5364, + "step": 19789 + }, + { + "epoch": 0.816621275893373, + "grad_norm": 5.010248144647355, + "learning_rate": 2.56889744702665e-07, + "loss": 0.5194, + "step": 19790 + }, + { + "epoch": 0.8166625402327309, + "grad_norm": 7.985441264266491, + "learning_rate": 2.5677756270341237e-07, + "loss": 0.5542, + "step": 19791 + }, + { + "epoch": 0.8167038045720888, + "grad_norm": 2.475609257675962, + "learning_rate": 2.566654029112225e-07, + "loss": 0.5099, + "step": 19792 + }, + { + "epoch": 0.8167450689114467, + "grad_norm": 2.385879333073889, + "learning_rate": 2.56553265328099e-07, + "loss": 0.461, + "step": 19793 + }, + { + "epoch": 0.8167863332508046, + "grad_norm": 8.456978778821766, + "learning_rate": 2.5644114995604505e-07, + "loss": 0.5696, + "step": 19794 + }, + { + "epoch": 0.8168275975901625, + "grad_norm": 2.196017566488619, + "learning_rate": 2.563290567970627e-07, + 
"loss": 0.521, + "step": 19795 + }, + { + "epoch": 0.8168688619295205, + "grad_norm": 4.4226925049274985, + "learning_rate": 2.5621698585315464e-07, + "loss": 0.5207, + "step": 19796 + }, + { + "epoch": 0.8169101262688785, + "grad_norm": 2.725230768547741, + "learning_rate": 2.5610493712632304e-07, + "loss": 0.4833, + "step": 19797 + }, + { + "epoch": 0.8169513906082364, + "grad_norm": 4.678832398943033, + "learning_rate": 2.559929106185691e-07, + "loss": 0.4878, + "step": 19798 + }, + { + "epoch": 0.8169926549475943, + "grad_norm": 16.830027809316533, + "learning_rate": 2.558809063318934e-07, + "loss": 0.4558, + "step": 19799 + }, + { + "epoch": 0.8170339192869522, + "grad_norm": 4.138425716053124, + "learning_rate": 2.557689242682969e-07, + "loss": 0.5104, + "step": 19800 + }, + { + "epoch": 0.8170751836263102, + "grad_norm": 3.378265975310865, + "learning_rate": 2.5565696442978e-07, + "loss": 0.5209, + "step": 19801 + }, + { + "epoch": 0.8171164479656681, + "grad_norm": 1.8376675979690054, + "learning_rate": 2.555450268183432e-07, + "loss": 0.4938, + "step": 19802 + }, + { + "epoch": 0.817157712305026, + "grad_norm": 2.5282750063504933, + "learning_rate": 2.5543311143598476e-07, + "loss": 0.4944, + "step": 19803 + }, + { + "epoch": 0.8171989766443839, + "grad_norm": 2.552086405777895, + "learning_rate": 2.5532121828470446e-07, + "loss": 0.4956, + "step": 19804 + }, + { + "epoch": 0.8172402409837418, + "grad_norm": 3.258801086632809, + "learning_rate": 2.5520934736650087e-07, + "loss": 0.5294, + "step": 19805 + }, + { + "epoch": 0.8172815053230997, + "grad_norm": 6.95558311964212, + "learning_rate": 2.550974986833727e-07, + "loss": 0.4853, + "step": 19806 + }, + { + "epoch": 0.8173227696624576, + "grad_norm": 2.829614589493367, + "learning_rate": 2.549856722373171e-07, + "loss": 0.5173, + "step": 19807 + }, + { + "epoch": 0.8173640340018157, + "grad_norm": 2.5092113807968595, + "learning_rate": 2.548738680303321e-07, + "loss": 0.5494, + "step": 19808 + }, + { + "epoch": 0.8174052983411736, + "grad_norm": 2.3112127820623902, + "learning_rate": 2.547620860644145e-07, + "loss": 0.5316, + "step": 19809 + }, + { + "epoch": 0.8174465626805315, + "grad_norm": 2.9710541170843476, + "learning_rate": 2.546503263415615e-07, + "loss": 0.5247, + "step": 19810 + }, + { + "epoch": 0.8174878270198894, + "grad_norm": 2.8186811939934087, + "learning_rate": 2.5453858886376917e-07, + "loss": 0.5636, + "step": 19811 + }, + { + "epoch": 0.8175290913592473, + "grad_norm": 3.8610739309682787, + "learning_rate": 2.5442687363303285e-07, + "loss": 0.4381, + "step": 19812 + }, + { + "epoch": 0.8175703556986053, + "grad_norm": 4.2193774381415325, + "learning_rate": 2.5431518065134843e-07, + "loss": 0.4942, + "step": 19813 + }, + { + "epoch": 0.8176116200379632, + "grad_norm": 2.657198619910849, + "learning_rate": 2.5420350992071175e-07, + "loss": 0.4998, + "step": 19814 + }, + { + "epoch": 0.8176528843773211, + "grad_norm": 2.1673479537718703, + "learning_rate": 2.5409186144311623e-07, + "loss": 0.5288, + "step": 19815 + }, + { + "epoch": 0.817694148716679, + "grad_norm": 2.7599395903776993, + "learning_rate": 2.5398023522055706e-07, + "loss": 0.5288, + "step": 19816 + }, + { + "epoch": 0.8177354130560369, + "grad_norm": 2.6548704768774494, + "learning_rate": 2.538686312550278e-07, + "loss": 0.5273, + "step": 19817 + }, + { + "epoch": 0.817776677395395, + "grad_norm": 1.7502338323217916, + "learning_rate": 2.5375704954852266e-07, + "loss": 0.5076, + "step": 19818 + }, + { + "epoch": 0.8178179417347529, + 
"grad_norm": 2.514300917339976, + "learning_rate": 2.5364549010303367e-07, + "loss": 0.49, + "step": 19819 + }, + { + "epoch": 0.8178592060741108, + "grad_norm": 3.627448629517708, + "learning_rate": 2.535339529205543e-07, + "loss": 0.4705, + "step": 19820 + }, + { + "epoch": 0.8179004704134687, + "grad_norm": 2.0087446187127758, + "learning_rate": 2.5342243800307664e-07, + "loss": 0.4642, + "step": 19821 + }, + { + "epoch": 0.8179417347528266, + "grad_norm": 2.157293018426783, + "learning_rate": 2.5331094535259305e-07, + "loss": 0.4995, + "step": 19822 + }, + { + "epoch": 0.8179829990921845, + "grad_norm": 2.961056206849456, + "learning_rate": 2.5319947497109444e-07, + "loss": 0.5389, + "step": 19823 + }, + { + "epoch": 0.8180242634315424, + "grad_norm": 6.8865955257822975, + "learning_rate": 2.530880268605724e-07, + "loss": 0.4963, + "step": 19824 + }, + { + "epoch": 0.8180655277709004, + "grad_norm": 2.714538612466391, + "learning_rate": 2.5297660102301726e-07, + "loss": 0.5175, + "step": 19825 + }, + { + "epoch": 0.8181067921102583, + "grad_norm": 3.3130763329266517, + "learning_rate": 2.528651974604196e-07, + "loss": 0.4823, + "step": 19826 + }, + { + "epoch": 0.8181480564496162, + "grad_norm": 2.5575199772910597, + "learning_rate": 2.527538161747697e-07, + "loss": 0.4849, + "step": 19827 + }, + { + "epoch": 0.8181893207889742, + "grad_norm": 2.334081062334171, + "learning_rate": 2.5264245716805646e-07, + "loss": 0.4386, + "step": 19828 + }, + { + "epoch": 0.8182305851283321, + "grad_norm": 3.995482374730322, + "learning_rate": 2.5253112044226934e-07, + "loss": 0.5273, + "step": 19829 + }, + { + "epoch": 0.81827184946769, + "grad_norm": 2.4759911358401148, + "learning_rate": 2.524198059993971e-07, + "loss": 0.488, + "step": 19830 + }, + { + "epoch": 0.818313113807048, + "grad_norm": 2.8041514338157922, + "learning_rate": 2.523085138414284e-07, + "loss": 0.4786, + "step": 19831 + }, + { + "epoch": 0.8183543781464059, + "grad_norm": 3.7236501101645674, + "learning_rate": 2.5219724397035057e-07, + "loss": 0.5174, + "step": 19832 + }, + { + "epoch": 0.8183956424857638, + "grad_norm": 2.5807847846730345, + "learning_rate": 2.5208599638815143e-07, + "loss": 0.5397, + "step": 19833 + }, + { + "epoch": 0.8184369068251217, + "grad_norm": 2.556644618851636, + "learning_rate": 2.5197477109681814e-07, + "loss": 0.5033, + "step": 19834 + }, + { + "epoch": 0.8184781711644796, + "grad_norm": 3.4135255157454307, + "learning_rate": 2.518635680983381e-07, + "loss": 0.5306, + "step": 19835 + }, + { + "epoch": 0.8185194355038375, + "grad_norm": 2.473774796461955, + "learning_rate": 2.5175238739469654e-07, + "loss": 0.5568, + "step": 19836 + }, + { + "epoch": 0.8185606998431955, + "grad_norm": 2.874225781764274, + "learning_rate": 2.5164122898788034e-07, + "loss": 0.5255, + "step": 19837 + }, + { + "epoch": 0.8186019641825535, + "grad_norm": 4.584302157739477, + "learning_rate": 2.515300928798744e-07, + "loss": 0.5543, + "step": 19838 + }, + { + "epoch": 0.8186432285219114, + "grad_norm": 2.0292807802720345, + "learning_rate": 2.514189790726646e-07, + "loss": 0.5296, + "step": 19839 + }, + { + "epoch": 0.8186844928612693, + "grad_norm": 3.03419818494231, + "learning_rate": 2.51307887568235e-07, + "loss": 0.4847, + "step": 19840 + }, + { + "epoch": 0.8187257572006272, + "grad_norm": 3.099813362157745, + "learning_rate": 2.5119681836857016e-07, + "loss": 0.5558, + "step": 19841 + }, + { + "epoch": 0.8187670215399852, + "grad_norm": 3.36168492944615, + "learning_rate": 2.510857714756543e-07, + "loss": 
0.4517, + "step": 19842 + }, + { + "epoch": 0.8188082858793431, + "grad_norm": 2.8390974582031343, + "learning_rate": 2.5097474689147104e-07, + "loss": 0.4881, + "step": 19843 + }, + { + "epoch": 0.818849550218701, + "grad_norm": 2.6594138301024324, + "learning_rate": 2.508637446180031e-07, + "loss": 0.4951, + "step": 19844 + }, + { + "epoch": 0.8188908145580589, + "grad_norm": 2.8580032234178856, + "learning_rate": 2.507527646572335e-07, + "loss": 0.5225, + "step": 19845 + }, + { + "epoch": 0.8189320788974168, + "grad_norm": 2.1955548137626177, + "learning_rate": 2.506418070111447e-07, + "loss": 0.4547, + "step": 19846 + }, + { + "epoch": 0.8189733432367747, + "grad_norm": 2.4364911305817034, + "learning_rate": 2.5053087168171905e-07, + "loss": 0.4805, + "step": 19847 + }, + { + "epoch": 0.8190146075761328, + "grad_norm": 16.555918043874605, + "learning_rate": 2.504199586709374e-07, + "loss": 0.5073, + "step": 19848 + }, + { + "epoch": 0.8190558719154907, + "grad_norm": 3.2153533190414785, + "learning_rate": 2.5030906798078117e-07, + "loss": 0.5529, + "step": 19849 + }, + { + "epoch": 0.8190971362548486, + "grad_norm": 2.6166817538375597, + "learning_rate": 2.501981996132316e-07, + "loss": 0.4839, + "step": 19850 + }, + { + "epoch": 0.8191384005942065, + "grad_norm": 6.1833377493545765, + "learning_rate": 2.5008735357026834e-07, + "loss": 0.515, + "step": 19851 + }, + { + "epoch": 0.8191796649335644, + "grad_norm": 5.365856179887717, + "learning_rate": 2.499765298538722e-07, + "loss": 0.4764, + "step": 19852 + }, + { + "epoch": 0.8192209292729223, + "grad_norm": 4.486341817303236, + "learning_rate": 2.498657284660217e-07, + "loss": 0.5129, + "step": 19853 + }, + { + "epoch": 0.8192621936122803, + "grad_norm": 2.693500933342661, + "learning_rate": 2.4975494940869687e-07, + "loss": 0.4738, + "step": 19854 + }, + { + "epoch": 0.8193034579516382, + "grad_norm": 3.3991575547316155, + "learning_rate": 2.496441926838767e-07, + "loss": 0.492, + "step": 19855 + }, + { + "epoch": 0.8193447222909961, + "grad_norm": 2.3235238100835387, + "learning_rate": 2.495334582935387e-07, + "loss": 0.5101, + "step": 19856 + }, + { + "epoch": 0.819385986630354, + "grad_norm": 2.3737619330029087, + "learning_rate": 2.4942274623966133e-07, + "loss": 0.4677, + "step": 19857 + }, + { + "epoch": 0.819427250969712, + "grad_norm": 2.7309771825957343, + "learning_rate": 2.4931205652422217e-07, + "loss": 0.489, + "step": 19858 + }, + { + "epoch": 0.81946851530907, + "grad_norm": 7.267154017311627, + "learning_rate": 2.492013891491987e-07, + "loss": 0.5163, + "step": 19859 + }, + { + "epoch": 0.8195097796484279, + "grad_norm": 2.9827279953891392, + "learning_rate": 2.490907441165672e-07, + "loss": 0.4822, + "step": 19860 + }, + { + "epoch": 0.8195510439877858, + "grad_norm": 1.9306753318489815, + "learning_rate": 2.4898012142830434e-07, + "loss": 0.4564, + "step": 19861 + }, + { + "epoch": 0.8195923083271437, + "grad_norm": 15.915659695983209, + "learning_rate": 2.4886952108638597e-07, + "loss": 0.5165, + "step": 19862 + }, + { + "epoch": 0.8196335726665016, + "grad_norm": 2.2137709490312485, + "learning_rate": 2.487589430927882e-07, + "loss": 0.52, + "step": 19863 + }, + { + "epoch": 0.8196748370058595, + "grad_norm": 5.695831970722875, + "learning_rate": 2.4864838744948583e-07, + "loss": 0.5655, + "step": 19864 + }, + { + "epoch": 0.8197161013452174, + "grad_norm": 2.5599330159792224, + "learning_rate": 2.4853785415845326e-07, + "loss": 0.4741, + "step": 19865 + }, + { + "epoch": 0.8197573656845754, + "grad_norm": 
2.786829852001488, + "learning_rate": 2.4842734322166535e-07, + "loss": 0.5154, + "step": 19866 + }, + { + "epoch": 0.8197986300239333, + "grad_norm": 5.401327847655792, + "learning_rate": 2.483168546410961e-07, + "loss": 0.4977, + "step": 19867 + }, + { + "epoch": 0.8198398943632912, + "grad_norm": 2.357778719308694, + "learning_rate": 2.4820638841871935e-07, + "loss": 0.5012, + "step": 19868 + }, + { + "epoch": 0.8198811587026492, + "grad_norm": 2.8622509835748815, + "learning_rate": 2.4809594455650764e-07, + "loss": 0.5288, + "step": 19869 + }, + { + "epoch": 0.8199224230420071, + "grad_norm": 3.616647807469318, + "learning_rate": 2.479855230564341e-07, + "loss": 0.5113, + "step": 19870 + }, + { + "epoch": 0.819963687381365, + "grad_norm": 3.5050218284858987, + "learning_rate": 2.4787512392047124e-07, + "loss": 0.5624, + "step": 19871 + }, + { + "epoch": 0.820004951720723, + "grad_norm": 2.3841917297191846, + "learning_rate": 2.477647471505914e-07, + "loss": 0.4657, + "step": 19872 + }, + { + "epoch": 0.8200462160600809, + "grad_norm": 4.77729675393109, + "learning_rate": 2.4765439274876545e-07, + "loss": 0.5316, + "step": 19873 + }, + { + "epoch": 0.8200874803994388, + "grad_norm": 4.454334336615316, + "learning_rate": 2.4754406071696486e-07, + "loss": 0.5746, + "step": 19874 + }, + { + "epoch": 0.8201287447387967, + "grad_norm": 2.4051744426554635, + "learning_rate": 2.4743375105716084e-07, + "loss": 0.5144, + "step": 19875 + }, + { + "epoch": 0.8201700090781546, + "grad_norm": 2.3088097805780627, + "learning_rate": 2.473234637713235e-07, + "loss": 0.5441, + "step": 19876 + }, + { + "epoch": 0.8202112734175125, + "grad_norm": 6.481982236153556, + "learning_rate": 2.472131988614224e-07, + "loss": 0.4871, + "step": 19877 + }, + { + "epoch": 0.8202525377568705, + "grad_norm": 2.1182911588000346, + "learning_rate": 2.471029563294277e-07, + "loss": 0.4665, + "step": 19878 + }, + { + "epoch": 0.8202938020962285, + "grad_norm": 3.8820077231805983, + "learning_rate": 2.4699273617730844e-07, + "loss": 0.4956, + "step": 19879 + }, + { + "epoch": 0.8203350664355864, + "grad_norm": 4.362851562423269, + "learning_rate": 2.4688253840703373e-07, + "loss": 0.522, + "step": 19880 + }, + { + "epoch": 0.8203763307749443, + "grad_norm": 2.1265692785265564, + "learning_rate": 2.4677236302057133e-07, + "loss": 0.4754, + "step": 19881 + }, + { + "epoch": 0.8204175951143022, + "grad_norm": 5.76008838445537, + "learning_rate": 2.466622100198898e-07, + "loss": 0.5235, + "step": 19882 + }, + { + "epoch": 0.8204588594536601, + "grad_norm": 3.7566036119875448, + "learning_rate": 2.4655207940695647e-07, + "loss": 0.517, + "step": 19883 + }, + { + "epoch": 0.8205001237930181, + "grad_norm": 2.026325741866514, + "learning_rate": 2.4644197118373884e-07, + "loss": 0.5127, + "step": 19884 + }, + { + "epoch": 0.820541388132376, + "grad_norm": 10.386417881032134, + "learning_rate": 2.463318853522034e-07, + "loss": 0.5094, + "step": 19885 + }, + { + "epoch": 0.8205826524717339, + "grad_norm": 2.572482833415482, + "learning_rate": 2.4622182191431656e-07, + "loss": 0.4732, + "step": 19886 + }, + { + "epoch": 0.8206239168110918, + "grad_norm": 2.5171032144554264, + "learning_rate": 2.461117808720444e-07, + "loss": 0.4523, + "step": 19887 + }, + { + "epoch": 0.8206651811504497, + "grad_norm": 5.552435920371817, + "learning_rate": 2.460017622273531e-07, + "loss": 0.4961, + "step": 19888 + }, + { + "epoch": 0.8207064454898078, + "grad_norm": 16.34481017667847, + "learning_rate": 2.4589176598220695e-07, + "loss": 0.5005, + 
"step": 19889 + }, + { + "epoch": 0.8207477098291657, + "grad_norm": 5.841715203513966, + "learning_rate": 2.457817921385715e-07, + "loss": 0.5255, + "step": 19890 + }, + { + "epoch": 0.8207889741685236, + "grad_norm": 2.6235595738352435, + "learning_rate": 2.4567184069841044e-07, + "loss": 0.5056, + "step": 19891 + }, + { + "epoch": 0.8208302385078815, + "grad_norm": 2.4288706584304136, + "learning_rate": 2.455619116636885e-07, + "loss": 0.5205, + "step": 19892 + }, + { + "epoch": 0.8208715028472394, + "grad_norm": 2.5399700213802237, + "learning_rate": 2.4545200503636856e-07, + "loss": 0.544, + "step": 19893 + }, + { + "epoch": 0.8209127671865973, + "grad_norm": 4.3158919707968835, + "learning_rate": 2.4534212081841426e-07, + "loss": 0.4749, + "step": 19894 + }, + { + "epoch": 0.8209540315259553, + "grad_norm": 3.1600949597548182, + "learning_rate": 2.452322590117883e-07, + "loss": 0.5032, + "step": 19895 + }, + { + "epoch": 0.8209952958653132, + "grad_norm": 12.08540511585424, + "learning_rate": 2.4512241961845354e-07, + "loss": 0.5364, + "step": 19896 + }, + { + "epoch": 0.8210365602046711, + "grad_norm": 3.092789410668075, + "learning_rate": 2.4501260264037127e-07, + "loss": 0.4705, + "step": 19897 + }, + { + "epoch": 0.821077824544029, + "grad_norm": 37.607128022107894, + "learning_rate": 2.4490280807950346e-07, + "loss": 0.5145, + "step": 19898 + }, + { + "epoch": 0.821119088883387, + "grad_norm": 3.887009051300915, + "learning_rate": 2.447930359378111e-07, + "loss": 0.5294, + "step": 19899 + }, + { + "epoch": 0.8211603532227449, + "grad_norm": 2.9993835800377115, + "learning_rate": 2.4468328621725575e-07, + "loss": 0.4471, + "step": 19900 + }, + { + "epoch": 0.8212016175621029, + "grad_norm": 3.018917438408032, + "learning_rate": 2.445735589197967e-07, + "loss": 0.5182, + "step": 19901 + }, + { + "epoch": 0.8212428819014608, + "grad_norm": 2.0074664715908153, + "learning_rate": 2.444638540473945e-07, + "loss": 0.5654, + "step": 19902 + }, + { + "epoch": 0.8212841462408187, + "grad_norm": 3.8023260328281423, + "learning_rate": 2.4435417160200906e-07, + "loss": 0.5195, + "step": 19903 + }, + { + "epoch": 0.8213254105801766, + "grad_norm": 2.702686432865432, + "learning_rate": 2.4424451158559896e-07, + "loss": 0.495, + "step": 19904 + }, + { + "epoch": 0.8213666749195345, + "grad_norm": 2.78975059573708, + "learning_rate": 2.441348740001236e-07, + "loss": 0.519, + "step": 19905 + }, + { + "epoch": 0.8214079392588924, + "grad_norm": 6.598037764348423, + "learning_rate": 2.4402525884754085e-07, + "loss": 0.5148, + "step": 19906 + }, + { + "epoch": 0.8214492035982504, + "grad_norm": 3.295349455889519, + "learning_rate": 2.439156661298089e-07, + "loss": 0.4478, + "step": 19907 + }, + { + "epoch": 0.8214904679376083, + "grad_norm": 3.076271353482923, + "learning_rate": 2.438060958488853e-07, + "loss": 0.4925, + "step": 19908 + }, + { + "epoch": 0.8215317322769663, + "grad_norm": 2.1578502589672013, + "learning_rate": 2.4369654800672773e-07, + "loss": 0.5145, + "step": 19909 + }, + { + "epoch": 0.8215729966163242, + "grad_norm": 2.6925837391979983, + "learning_rate": 2.435870226052923e-07, + "loss": 0.515, + "step": 19910 + }, + { + "epoch": 0.8216142609556821, + "grad_norm": 2.661293737729415, + "learning_rate": 2.4347751964653576e-07, + "loss": 0.5687, + "step": 19911 + }, + { + "epoch": 0.82165552529504, + "grad_norm": 3.5471021194361825, + "learning_rate": 2.4336803913241404e-07, + "loss": 0.5005, + "step": 19912 + }, + { + "epoch": 0.821696789634398, + "grad_norm": 2.5764105226849017, 
+ "learning_rate": 2.43258581064883e-07, + "loss": 0.5099, + "step": 19913 + }, + { + "epoch": 0.8217380539737559, + "grad_norm": 2.5693127770342143, + "learning_rate": 2.4314914544589723e-07, + "loss": 0.5407, + "step": 19914 + }, + { + "epoch": 0.8217793183131138, + "grad_norm": 3.3672753315769586, + "learning_rate": 2.4303973227741204e-07, + "loss": 0.4241, + "step": 19915 + }, + { + "epoch": 0.8218205826524717, + "grad_norm": 2.753894601210841, + "learning_rate": 2.429303415613819e-07, + "loss": 0.5189, + "step": 19916 + }, + { + "epoch": 0.8218618469918296, + "grad_norm": 2.4422550855626315, + "learning_rate": 2.4282097329976065e-07, + "loss": 0.6013, + "step": 19917 + }, + { + "epoch": 0.8219031113311875, + "grad_norm": 2.6835877187603416, + "learning_rate": 2.4271162749450134e-07, + "loss": 0.5017, + "step": 19918 + }, + { + "epoch": 0.8219443756705456, + "grad_norm": 3.419448732513566, + "learning_rate": 2.426023041475577e-07, + "loss": 0.5041, + "step": 19919 + }, + { + "epoch": 0.8219856400099035, + "grad_norm": 4.787627680189849, + "learning_rate": 2.4249300326088237e-07, + "loss": 0.5178, + "step": 19920 + }, + { + "epoch": 0.8220269043492614, + "grad_norm": 5.821879467610324, + "learning_rate": 2.4238372483642813e-07, + "loss": 0.4761, + "step": 19921 + }, + { + "epoch": 0.8220681686886193, + "grad_norm": 3.734890115663737, + "learning_rate": 2.422744688761464e-07, + "loss": 0.4673, + "step": 19922 + }, + { + "epoch": 0.8221094330279772, + "grad_norm": 6.647229107972858, + "learning_rate": 2.421652353819888e-07, + "loss": 0.4909, + "step": 19923 + }, + { + "epoch": 0.8221506973673351, + "grad_norm": 2.4590211512604316, + "learning_rate": 2.4205602435590695e-07, + "loss": 0.4963, + "step": 19924 + }, + { + "epoch": 0.8221919617066931, + "grad_norm": 2.740848878349923, + "learning_rate": 2.419468357998516e-07, + "loss": 0.5328, + "step": 19925 + }, + { + "epoch": 0.822233226046051, + "grad_norm": 2.5219664918833296, + "learning_rate": 2.418376697157727e-07, + "loss": 0.4972, + "step": 19926 + }, + { + "epoch": 0.8222744903854089, + "grad_norm": 3.8792682566800347, + "learning_rate": 2.4172852610562033e-07, + "loss": 0.467, + "step": 19927 + }, + { + "epoch": 0.8223157547247668, + "grad_norm": 3.4955180546067486, + "learning_rate": 2.416194049713443e-07, + "loss": 0.5364, + "step": 19928 + }, + { + "epoch": 0.8223570190641247, + "grad_norm": 2.658705050652042, + "learning_rate": 2.415103063148941e-07, + "loss": 0.5032, + "step": 19929 + }, + { + "epoch": 0.8223982834034828, + "grad_norm": 3.998565809396813, + "learning_rate": 2.414012301382179e-07, + "loss": 0.5595, + "step": 19930 + }, + { + "epoch": 0.8224395477428407, + "grad_norm": 4.557249073140689, + "learning_rate": 2.4129217644326395e-07, + "loss": 0.4682, + "step": 19931 + }, + { + "epoch": 0.8224808120821986, + "grad_norm": 3.5658999611847997, + "learning_rate": 2.4118314523198064e-07, + "loss": 0.5272, + "step": 19932 + }, + { + "epoch": 0.8225220764215565, + "grad_norm": 3.663761361900127, + "learning_rate": 2.410741365063157e-07, + "loss": 0.5747, + "step": 19933 + }, + { + "epoch": 0.8225633407609144, + "grad_norm": 3.761369678610147, + "learning_rate": 2.4096515026821574e-07, + "loss": 0.4747, + "step": 19934 + }, + { + "epoch": 0.8226046051002723, + "grad_norm": 4.247207434819435, + "learning_rate": 2.408561865196277e-07, + "loss": 0.5487, + "step": 19935 + }, + { + "epoch": 0.8226458694396303, + "grad_norm": 2.16389119877585, + "learning_rate": 2.407472452624982e-07, + "loss": 0.4959, + "step": 19936 + }, + { + 
"epoch": 0.8226871337789882, + "grad_norm": 3.6452640754112955, + "learning_rate": 2.406383264987733e-07, + "loss": 0.4827, + "step": 19937 + }, + { + "epoch": 0.8227283981183461, + "grad_norm": 2.4757701887276666, + "learning_rate": 2.4052943023039804e-07, + "loss": 0.4683, + "step": 19938 + }, + { + "epoch": 0.822769662457704, + "grad_norm": 2.7036821137062783, + "learning_rate": 2.4042055645931795e-07, + "loss": 0.4955, + "step": 19939 + }, + { + "epoch": 0.822810926797062, + "grad_norm": 2.9284424184117923, + "learning_rate": 2.403117051874776e-07, + "loss": 0.4936, + "step": 19940 + }, + { + "epoch": 0.8228521911364199, + "grad_norm": 3.6041725351178977, + "learning_rate": 2.4020287641682195e-07, + "loss": 0.5192, + "step": 19941 + }, + { + "epoch": 0.8228934554757779, + "grad_norm": 2.906916281282678, + "learning_rate": 2.4009407014929424e-07, + "loss": 0.4752, + "step": 19942 + }, + { + "epoch": 0.8229347198151358, + "grad_norm": 9.57264034873135, + "learning_rate": 2.39985286386838e-07, + "loss": 0.53, + "step": 19943 + }, + { + "epoch": 0.8229759841544937, + "grad_norm": 3.689450919501123, + "learning_rate": 2.3987652513139656e-07, + "loss": 0.5747, + "step": 19944 + }, + { + "epoch": 0.8230172484938516, + "grad_norm": 3.398005126206704, + "learning_rate": 2.397677863849128e-07, + "loss": 0.5017, + "step": 19945 + }, + { + "epoch": 0.8230585128332095, + "grad_norm": 2.5810402929711427, + "learning_rate": 2.3965907014932935e-07, + "loss": 0.4789, + "step": 19946 + }, + { + "epoch": 0.8230997771725674, + "grad_norm": 2.9337974492609757, + "learning_rate": 2.395503764265873e-07, + "loss": 0.5125, + "step": 19947 + }, + { + "epoch": 0.8231410415119254, + "grad_norm": 3.016144174538508, + "learning_rate": 2.3944170521862886e-07, + "loss": 0.4989, + "step": 19948 + }, + { + "epoch": 0.8231823058512833, + "grad_norm": 4.204632060531851, + "learning_rate": 2.39333056527395e-07, + "loss": 0.474, + "step": 19949 + }, + { + "epoch": 0.8232235701906413, + "grad_norm": 5.531643201213288, + "learning_rate": 2.392244303548268e-07, + "loss": 0.536, + "step": 19950 + }, + { + "epoch": 0.8232648345299992, + "grad_norm": 2.4412866346638413, + "learning_rate": 2.3911582670286374e-07, + "loss": 0.5051, + "step": 19951 + }, + { + "epoch": 0.8233060988693571, + "grad_norm": 2.5917571144277916, + "learning_rate": 2.3900724557344637e-07, + "loss": 0.5068, + "step": 19952 + }, + { + "epoch": 0.823347363208715, + "grad_norm": 3.1220506867629734, + "learning_rate": 2.3889868696851447e-07, + "loss": 0.4586, + "step": 19953 + }, + { + "epoch": 0.823388627548073, + "grad_norm": 3.448471832546481, + "learning_rate": 2.387901508900064e-07, + "loss": 0.5229, + "step": 19954 + }, + { + "epoch": 0.8234298918874309, + "grad_norm": 8.603546038057274, + "learning_rate": 2.386816373398618e-07, + "loss": 0.6062, + "step": 19955 + }, + { + "epoch": 0.8234711562267888, + "grad_norm": 2.006279031240202, + "learning_rate": 2.38573146320018e-07, + "loss": 0.5335, + "step": 19956 + }, + { + "epoch": 0.8235124205661467, + "grad_norm": 3.156586405843529, + "learning_rate": 2.3846467783241353e-07, + "loss": 0.5374, + "step": 19957 + }, + { + "epoch": 0.8235536849055046, + "grad_norm": 4.184312362817035, + "learning_rate": 2.3835623187898609e-07, + "loss": 0.5396, + "step": 19958 + }, + { + "epoch": 0.8235949492448625, + "grad_norm": 2.9742241726081193, + "learning_rate": 2.382478084616721e-07, + "loss": 0.5109, + "step": 19959 + }, + { + "epoch": 0.8236362135842206, + "grad_norm": 3.9195276406080968, + "learning_rate": 
2.3813940758240872e-07, + "loss": 0.4913, + "step": 19960 + }, + { + "epoch": 0.8236774779235785, + "grad_norm": 2.425905004434964, + "learning_rate": 2.380310292431322e-07, + "loss": 0.5085, + "step": 19961 + }, + { + "epoch": 0.8237187422629364, + "grad_norm": 4.656110085081371, + "learning_rate": 2.3792267344577884e-07, + "loss": 0.5005, + "step": 19962 + }, + { + "epoch": 0.8237600066022943, + "grad_norm": 5.690475648487472, + "learning_rate": 2.3781434019228326e-07, + "loss": 0.4689, + "step": 19963 + }, + { + "epoch": 0.8238012709416522, + "grad_norm": 2.7211621892565527, + "learning_rate": 2.3770602948458125e-07, + "loss": 0.4843, + "step": 19964 + }, + { + "epoch": 0.8238425352810101, + "grad_norm": 3.132258049545447, + "learning_rate": 2.3759774132460727e-07, + "loss": 0.5012, + "step": 19965 + }, + { + "epoch": 0.8238837996203681, + "grad_norm": 10.055134019988232, + "learning_rate": 2.3748947571429607e-07, + "loss": 0.5714, + "step": 19966 + }, + { + "epoch": 0.823925063959726, + "grad_norm": 8.273286591317744, + "learning_rate": 2.373812326555808e-07, + "loss": 0.564, + "step": 19967 + }, + { + "epoch": 0.8239663282990839, + "grad_norm": 3.604207791652657, + "learning_rate": 2.372730121503956e-07, + "loss": 0.4827, + "step": 19968 + }, + { + "epoch": 0.8240075926384418, + "grad_norm": 2.380282273500941, + "learning_rate": 2.3716481420067289e-07, + "loss": 0.5073, + "step": 19969 + }, + { + "epoch": 0.8240488569777998, + "grad_norm": 2.2898211486066598, + "learning_rate": 2.370566388083461e-07, + "loss": 0.5408, + "step": 19970 + }, + { + "epoch": 0.8240901213171578, + "grad_norm": 29.3278539189419, + "learning_rate": 2.369484859753469e-07, + "loss": 0.5362, + "step": 19971 + }, + { + "epoch": 0.8241313856565157, + "grad_norm": 4.671750195863237, + "learning_rate": 2.3684035570360723e-07, + "loss": 0.4839, + "step": 19972 + }, + { + "epoch": 0.8241726499958736, + "grad_norm": 2.536969198439282, + "learning_rate": 2.367322479950589e-07, + "loss": 0.5374, + "step": 19973 + }, + { + "epoch": 0.8242139143352315, + "grad_norm": 6.9514990954959055, + "learning_rate": 2.3662416285163312e-07, + "loss": 0.5183, + "step": 19974 + }, + { + "epoch": 0.8242551786745894, + "grad_norm": 2.611632935928905, + "learning_rate": 2.3651610027525994e-07, + "loss": 0.4828, + "step": 19975 + }, + { + "epoch": 0.8242964430139473, + "grad_norm": 14.701556710337, + "learning_rate": 2.3640806026786992e-07, + "loss": 0.504, + "step": 19976 + }, + { + "epoch": 0.8243377073533052, + "grad_norm": 3.0929497161570896, + "learning_rate": 2.363000428313929e-07, + "loss": 0.4236, + "step": 19977 + }, + { + "epoch": 0.8243789716926632, + "grad_norm": 3.4737175861361353, + "learning_rate": 2.3619204796775895e-07, + "loss": 0.4988, + "step": 19978 + }, + { + "epoch": 0.8244202360320211, + "grad_norm": 3.448594466806985, + "learning_rate": 2.3608407567889605e-07, + "loss": 0.4781, + "step": 19979 + }, + { + "epoch": 0.8244615003713791, + "grad_norm": 5.1418156601683735, + "learning_rate": 2.3597612596673352e-07, + "loss": 0.4913, + "step": 19980 + }, + { + "epoch": 0.824502764710737, + "grad_norm": 2.248116083164825, + "learning_rate": 2.3586819883319943e-07, + "loss": 0.4833, + "step": 19981 + }, + { + "epoch": 0.8245440290500949, + "grad_norm": 7.698801971356862, + "learning_rate": 2.357602942802221e-07, + "loss": 0.4465, + "step": 19982 + }, + { + "epoch": 0.8245852933894529, + "grad_norm": 3.0148989820030963, + "learning_rate": 2.356524123097285e-07, + "loss": 0.4895, + "step": 19983 + }, + { + "epoch": 
0.8246265577288108, + "grad_norm": 2.1767202582812475, + "learning_rate": 2.355445529236454e-07, + "loss": 0.51, + "step": 19984 + }, + { + "epoch": 0.8246678220681687, + "grad_norm": 2.279597338561588, + "learning_rate": 2.354367161238996e-07, + "loss": 0.502, + "step": 19985 + }, + { + "epoch": 0.8247090864075266, + "grad_norm": 3.3013106175253317, + "learning_rate": 2.3532890191241774e-07, + "loss": 0.5079, + "step": 19986 + }, + { + "epoch": 0.8247503507468845, + "grad_norm": 2.907925973632419, + "learning_rate": 2.3522111029112574e-07, + "loss": 0.5625, + "step": 19987 + }, + { + "epoch": 0.8247916150862424, + "grad_norm": 2.678637098572233, + "learning_rate": 2.351133412619483e-07, + "loss": 0.4962, + "step": 19988 + }, + { + "epoch": 0.8248328794256004, + "grad_norm": 2.6243220605440576, + "learning_rate": 2.3500559482681077e-07, + "loss": 0.5616, + "step": 19989 + }, + { + "epoch": 0.8248741437649583, + "grad_norm": 2.952622040798861, + "learning_rate": 2.3489787098763787e-07, + "loss": 0.5092, + "step": 19990 + }, + { + "epoch": 0.8249154081043163, + "grad_norm": 5.434844145970602, + "learning_rate": 2.347901697463542e-07, + "loss": 0.5421, + "step": 19991 + }, + { + "epoch": 0.8249566724436742, + "grad_norm": 3.970164602164066, + "learning_rate": 2.3468249110488287e-07, + "loss": 0.4748, + "step": 19992 + }, + { + "epoch": 0.8249979367830321, + "grad_norm": 24.328454588380716, + "learning_rate": 2.3457483506514753e-07, + "loss": 0.4936, + "step": 19993 + }, + { + "epoch": 0.82503920112239, + "grad_norm": 3.9645246905906513, + "learning_rate": 2.3446720162907148e-07, + "loss": 0.4783, + "step": 19994 + }, + { + "epoch": 0.825080465461748, + "grad_norm": 5.793206579667163, + "learning_rate": 2.3435959079857733e-07, + "loss": 0.4832, + "step": 19995 + }, + { + "epoch": 0.8251217298011059, + "grad_norm": 3.3476543004626724, + "learning_rate": 2.3425200257558655e-07, + "loss": 0.4538, + "step": 19996 + }, + { + "epoch": 0.8251629941404638, + "grad_norm": 2.2463070102246308, + "learning_rate": 2.3414443696202125e-07, + "loss": 0.509, + "step": 19997 + }, + { + "epoch": 0.8252042584798217, + "grad_norm": 3.232268871374866, + "learning_rate": 2.3403689395980326e-07, + "loss": 0.5571, + "step": 19998 + }, + { + "epoch": 0.8252455228191796, + "grad_norm": 15.786310103226528, + "learning_rate": 2.3392937357085352e-07, + "loss": 0.4666, + "step": 19999 + }, + { + "epoch": 0.8252867871585375, + "grad_norm": 3.2199059066751525, + "learning_rate": 2.3382187579709203e-07, + "loss": 0.4877, + "step": 20000 + }, + { + "epoch": 0.8253280514978956, + "grad_norm": 3.9051798230929724, + "learning_rate": 2.3371440064043935e-07, + "loss": 0.4684, + "step": 20001 + }, + { + "epoch": 0.8253693158372535, + "grad_norm": 16.544377130052826, + "learning_rate": 2.3360694810281535e-07, + "loss": 0.5049, + "step": 20002 + }, + { + "epoch": 0.8254105801766114, + "grad_norm": 3.203210370807695, + "learning_rate": 2.3349951818613964e-07, + "loss": 0.5264, + "step": 20003 + }, + { + "epoch": 0.8254518445159693, + "grad_norm": 5.701080978075867, + "learning_rate": 2.3339211089233032e-07, + "loss": 0.4961, + "step": 20004 + }, + { + "epoch": 0.8254931088553272, + "grad_norm": 113.483897437176, + "learning_rate": 2.3328472622330655e-07, + "loss": 0.5172, + "step": 20005 + }, + { + "epoch": 0.8255343731946851, + "grad_norm": 10.12654818681004, + "learning_rate": 2.331773641809863e-07, + "loss": 0.5061, + "step": 20006 + }, + { + "epoch": 0.8255756375340431, + "grad_norm": 3.139391826598504, + "learning_rate": 
2.3307002476728801e-07, + "loss": 0.4888, + "step": 20007 + }, + { + "epoch": 0.825616901873401, + "grad_norm": 2.475409115263632, + "learning_rate": 2.3296270798412816e-07, + "loss": 0.5083, + "step": 20008 + }, + { + "epoch": 0.8256581662127589, + "grad_norm": 2.747196978580708, + "learning_rate": 2.328554138334239e-07, + "loss": 0.517, + "step": 20009 + }, + { + "epoch": 0.8256994305521168, + "grad_norm": 6.218111292555604, + "learning_rate": 2.3274814231709168e-07, + "loss": 0.5692, + "step": 20010 + }, + { + "epoch": 0.8257406948914748, + "grad_norm": 8.592052834532899, + "learning_rate": 2.326408934370483e-07, + "loss": 0.4775, + "step": 20011 + }, + { + "epoch": 0.8257819592308328, + "grad_norm": 3.1236733613380907, + "learning_rate": 2.3253366719520858e-07, + "loss": 0.503, + "step": 20012 + }, + { + "epoch": 0.8258232235701907, + "grad_norm": 6.6113669474828685, + "learning_rate": 2.324264635934883e-07, + "loss": 0.5459, + "step": 20013 + }, + { + "epoch": 0.8258644879095486, + "grad_norm": 4.014737702574394, + "learning_rate": 2.3231928263380243e-07, + "loss": 0.516, + "step": 20014 + }, + { + "epoch": 0.8259057522489065, + "grad_norm": 2.686786246082888, + "learning_rate": 2.322121243180656e-07, + "loss": 0.444, + "step": 20015 + }, + { + "epoch": 0.8259470165882644, + "grad_norm": 2.9639855818655723, + "learning_rate": 2.3210498864819147e-07, + "loss": 0.5442, + "step": 20016 + }, + { + "epoch": 0.8259882809276223, + "grad_norm": 6.26705596357623, + "learning_rate": 2.3199787562609415e-07, + "loss": 0.487, + "step": 20017 + }, + { + "epoch": 0.8260295452669802, + "grad_norm": 3.371518982361105, + "learning_rate": 2.318907852536868e-07, + "loss": 0.5411, + "step": 20018 + }, + { + "epoch": 0.8260708096063382, + "grad_norm": 3.993946495193791, + "learning_rate": 2.3178371753288252e-07, + "loss": 0.5857, + "step": 20019 + }, + { + "epoch": 0.8261120739456961, + "grad_norm": 2.918036696468877, + "learning_rate": 2.316766724655935e-07, + "loss": 0.4874, + "step": 20020 + }, + { + "epoch": 0.8261533382850541, + "grad_norm": 3.0535306016334114, + "learning_rate": 2.3156965005373215e-07, + "loss": 0.5373, + "step": 20021 + }, + { + "epoch": 0.826194602624412, + "grad_norm": 5.12608721539841, + "learning_rate": 2.3146265029920966e-07, + "loss": 0.5443, + "step": 20022 + }, + { + "epoch": 0.8262358669637699, + "grad_norm": 21.180153733881767, + "learning_rate": 2.313556732039378e-07, + "loss": 0.4797, + "step": 20023 + }, + { + "epoch": 0.8262771313031279, + "grad_norm": 2.74754035846685, + "learning_rate": 2.312487187698274e-07, + "loss": 0.4986, + "step": 20024 + }, + { + "epoch": 0.8263183956424858, + "grad_norm": 2.4141681298677073, + "learning_rate": 2.3114178699878857e-07, + "loss": 0.5043, + "step": 20025 + }, + { + "epoch": 0.8263596599818437, + "grad_norm": 2.33984356886792, + "learning_rate": 2.3103487789273165e-07, + "loss": 0.5163, + "step": 20026 + }, + { + "epoch": 0.8264009243212016, + "grad_norm": 5.398627980604164, + "learning_rate": 2.3092799145356625e-07, + "loss": 0.5527, + "step": 20027 + }, + { + "epoch": 0.8264421886605595, + "grad_norm": 2.72806689427884, + "learning_rate": 2.3082112768320202e-07, + "loss": 0.5144, + "step": 20028 + }, + { + "epoch": 0.8264834529999174, + "grad_norm": 2.7078338696146043, + "learning_rate": 2.3071428658354709e-07, + "loss": 0.5147, + "step": 20029 + }, + { + "epoch": 0.8265247173392753, + "grad_norm": 2.3901570676919426, + "learning_rate": 2.3060746815651011e-07, + "loss": 0.4931, + "step": 20030 + }, + { + "epoch": 
0.8265659816786334, + "grad_norm": 2.834737213133396, + "learning_rate": 2.3050067240399992e-07, + "loss": 0.5329, + "step": 20031 + }, + { + "epoch": 0.8266072460179913, + "grad_norm": 2.054781424875766, + "learning_rate": 2.3039389932792293e-07, + "loss": 0.4155, + "step": 20032 + }, + { + "epoch": 0.8266485103573492, + "grad_norm": 3.4191876321910546, + "learning_rate": 2.3028714893018716e-07, + "loss": 0.4722, + "step": 20033 + }, + { + "epoch": 0.8266897746967071, + "grad_norm": 2.149083863145497, + "learning_rate": 2.3018042121269938e-07, + "loss": 0.5193, + "step": 20034 + }, + { + "epoch": 0.826731039036065, + "grad_norm": 4.8345025793885155, + "learning_rate": 2.300737161773656e-07, + "loss": 0.5139, + "step": 20035 + }, + { + "epoch": 0.826772303375423, + "grad_norm": 2.8361263157106618, + "learning_rate": 2.2996703382609262e-07, + "loss": 0.5342, + "step": 20036 + }, + { + "epoch": 0.8268135677147809, + "grad_norm": 2.1354142457603955, + "learning_rate": 2.298603741607851e-07, + "loss": 0.5549, + "step": 20037 + }, + { + "epoch": 0.8268548320541388, + "grad_norm": 2.4414916465072527, + "learning_rate": 2.2975373718334863e-07, + "loss": 0.4788, + "step": 20038 + }, + { + "epoch": 0.8268960963934967, + "grad_norm": 3.3093157086249088, + "learning_rate": 2.2964712289568822e-07, + "loss": 0.5419, + "step": 20039 + }, + { + "epoch": 0.8269373607328546, + "grad_norm": 3.897871673131828, + "learning_rate": 2.2954053129970836e-07, + "loss": 0.4865, + "step": 20040 + }, + { + "epoch": 0.8269786250722126, + "grad_norm": 35.005711621674436, + "learning_rate": 2.294339623973125e-07, + "loss": 0.5365, + "step": 20041 + }, + { + "epoch": 0.8270198894115706, + "grad_norm": 3.14689562614816, + "learning_rate": 2.2932741619040464e-07, + "loss": 0.5263, + "step": 20042 + }, + { + "epoch": 0.8270611537509285, + "grad_norm": 3.0767849941735825, + "learning_rate": 2.2922089268088774e-07, + "loss": 0.501, + "step": 20043 + }, + { + "epoch": 0.8271024180902864, + "grad_norm": 2.544554276189194, + "learning_rate": 2.2911439187066513e-07, + "loss": 0.5247, + "step": 20044 + }, + { + "epoch": 0.8271436824296443, + "grad_norm": 5.060902289509738, + "learning_rate": 2.2900791376163827e-07, + "loss": 0.4716, + "step": 20045 + }, + { + "epoch": 0.8271849467690022, + "grad_norm": 2.683630895051007, + "learning_rate": 2.2890145835570965e-07, + "loss": 0.4959, + "step": 20046 + }, + { + "epoch": 0.8272262111083601, + "grad_norm": 2.559370966793876, + "learning_rate": 2.287950256547812e-07, + "loss": 0.5265, + "step": 20047 + }, + { + "epoch": 0.8272674754477181, + "grad_norm": 2.6713252482918857, + "learning_rate": 2.2868861566075367e-07, + "loss": 0.5328, + "step": 20048 + }, + { + "epoch": 0.827308739787076, + "grad_norm": 5.000550148904067, + "learning_rate": 2.2858222837552746e-07, + "loss": 0.4983, + "step": 20049 + }, + { + "epoch": 0.8273500041264339, + "grad_norm": 4.477655406114808, + "learning_rate": 2.284758638010032e-07, + "loss": 0.508, + "step": 20050 + }, + { + "epoch": 0.8273912684657919, + "grad_norm": 6.566782605038469, + "learning_rate": 2.283695219390808e-07, + "loss": 0.5332, + "step": 20051 + }, + { + "epoch": 0.8274325328051498, + "grad_norm": 3.1901778609361835, + "learning_rate": 2.282632027916603e-07, + "loss": 0.4773, + "step": 20052 + }, + { + "epoch": 0.8274737971445078, + "grad_norm": 5.408170302642171, + "learning_rate": 2.2815690636064008e-07, + "loss": 0.5188, + "step": 20053 + }, + { + "epoch": 0.8275150614838657, + "grad_norm": 2.935545005114394, + "learning_rate": 
2.2805063264791908e-07, + "loss": 0.4814, + "step": 20054 + }, + { + "epoch": 0.8275563258232236, + "grad_norm": 3.114425952648363, + "learning_rate": 2.2794438165539582e-07, + "loss": 0.5192, + "step": 20055 + }, + { + "epoch": 0.8275975901625815, + "grad_norm": 5.144326183933115, + "learning_rate": 2.2783815338496843e-07, + "loss": 0.5018, + "step": 20056 + }, + { + "epoch": 0.8276388545019394, + "grad_norm": 2.277198933927828, + "learning_rate": 2.2773194783853375e-07, + "loss": 0.5342, + "step": 20057 + }, + { + "epoch": 0.8276801188412973, + "grad_norm": 7.347505053485095, + "learning_rate": 2.276257650179892e-07, + "loss": 0.4939, + "step": 20058 + }, + { + "epoch": 0.8277213831806552, + "grad_norm": 3.6335822111737213, + "learning_rate": 2.2751960492523133e-07, + "loss": 0.5009, + "step": 20059 + }, + { + "epoch": 0.8277626475200132, + "grad_norm": 6.381979320462126, + "learning_rate": 2.2741346756215708e-07, + "loss": 0.5097, + "step": 20060 + }, + { + "epoch": 0.8278039118593711, + "grad_norm": 3.7663825034238347, + "learning_rate": 2.273073529306618e-07, + "loss": 0.4767, + "step": 20061 + }, + { + "epoch": 0.8278451761987291, + "grad_norm": 2.670892389197577, + "learning_rate": 2.2720126103264062e-07, + "loss": 0.5699, + "step": 20062 + }, + { + "epoch": 0.827886440538087, + "grad_norm": 2.274089277261851, + "learning_rate": 2.27095191869989e-07, + "loss": 0.5144, + "step": 20063 + }, + { + "epoch": 0.8279277048774449, + "grad_norm": 6.067739254719092, + "learning_rate": 2.2698914544460147e-07, + "loss": 0.5215, + "step": 20064 + }, + { + "epoch": 0.8279689692168029, + "grad_norm": 2.4051782743246726, + "learning_rate": 2.2688312175837284e-07, + "loss": 0.5599, + "step": 20065 + }, + { + "epoch": 0.8280102335561608, + "grad_norm": 2.494154451744083, + "learning_rate": 2.2677712081319624e-07, + "loss": 0.4834, + "step": 20066 + }, + { + "epoch": 0.8280514978955187, + "grad_norm": 19.336258272984104, + "learning_rate": 2.2667114261096517e-07, + "loss": 0.5362, + "step": 20067 + }, + { + "epoch": 0.8280927622348766, + "grad_norm": 3.965448958769328, + "learning_rate": 2.2656518715357293e-07, + "loss": 0.5331, + "step": 20068 + }, + { + "epoch": 0.8281340265742345, + "grad_norm": 45.20728533012538, + "learning_rate": 2.2645925444291255e-07, + "loss": 0.5066, + "step": 20069 + }, + { + "epoch": 0.8281752909135924, + "grad_norm": 1.917335516261659, + "learning_rate": 2.2635334448087547e-07, + "loss": 0.5328, + "step": 20070 + }, + { + "epoch": 0.8282165552529503, + "grad_norm": 2.612236735503126, + "learning_rate": 2.262474572693537e-07, + "loss": 0.5568, + "step": 20071 + }, + { + "epoch": 0.8282578195923084, + "grad_norm": 2.9890862918564016, + "learning_rate": 2.2614159281023923e-07, + "loss": 0.5256, + "step": 20072 + }, + { + "epoch": 0.8282990839316663, + "grad_norm": 2.730857723601545, + "learning_rate": 2.260357511054222e-07, + "loss": 0.5026, + "step": 20073 + }, + { + "epoch": 0.8283403482710242, + "grad_norm": 3.672325789160699, + "learning_rate": 2.2592993215679393e-07, + "loss": 0.5268, + "step": 20074 + }, + { + "epoch": 0.8283816126103821, + "grad_norm": 3.3860007583268965, + "learning_rate": 2.2582413596624408e-07, + "loss": 0.466, + "step": 20075 + }, + { + "epoch": 0.82842287694974, + "grad_norm": 2.7186093275886787, + "learning_rate": 2.2571836253566263e-07, + "loss": 0.5201, + "step": 20076 + }, + { + "epoch": 0.828464141289098, + "grad_norm": 2.792880482679318, + "learning_rate": 2.2561261186693927e-07, + "loss": 0.5179, + "step": 20077 + }, + { + "epoch": 
0.8285054056284559, + "grad_norm": 2.192633524004812, + "learning_rate": 2.2550688396196228e-07, + "loss": 0.4779, + "step": 20078 + }, + { + "epoch": 0.8285466699678138, + "grad_norm": 2.2743654694623, + "learning_rate": 2.2540117882262085e-07, + "loss": 0.4745, + "step": 20079 + }, + { + "epoch": 0.8285879343071717, + "grad_norm": 2.4722428974951356, + "learning_rate": 2.2529549645080277e-07, + "loss": 0.4778, + "step": 20080 + }, + { + "epoch": 0.8286291986465296, + "grad_norm": 2.493228478948054, + "learning_rate": 2.2518983684839639e-07, + "loss": 0.5099, + "step": 20081 + }, + { + "epoch": 0.8286704629858876, + "grad_norm": 5.310691617757475, + "learning_rate": 2.250842000172882e-07, + "loss": 0.4506, + "step": 20082 + }, + { + "epoch": 0.8287117273252456, + "grad_norm": 2.940618481277832, + "learning_rate": 2.249785859593655e-07, + "loss": 0.5002, + "step": 20083 + }, + { + "epoch": 0.8287529916646035, + "grad_norm": 6.42018954871163, + "learning_rate": 2.2487299467651495e-07, + "loss": 0.5509, + "step": 20084 + }, + { + "epoch": 0.8287942560039614, + "grad_norm": 2.5731134766715384, + "learning_rate": 2.247674261706229e-07, + "loss": 0.4817, + "step": 20085 + }, + { + "epoch": 0.8288355203433193, + "grad_norm": 3.11295138018976, + "learning_rate": 2.246618804435745e-07, + "loss": 0.4937, + "step": 20086 + }, + { + "epoch": 0.8288767846826772, + "grad_norm": 2.608068580161178, + "learning_rate": 2.2455635749725555e-07, + "loss": 0.5201, + "step": 20087 + }, + { + "epoch": 0.8289180490220351, + "grad_norm": 3.190164414053653, + "learning_rate": 2.2445085733355042e-07, + "loss": 0.5092, + "step": 20088 + }, + { + "epoch": 0.8289593133613931, + "grad_norm": 2.2027070234247064, + "learning_rate": 2.2434537995434423e-07, + "loss": 0.4756, + "step": 20089 + }, + { + "epoch": 0.829000577700751, + "grad_norm": 8.417940384594305, + "learning_rate": 2.2423992536152049e-07, + "loss": 0.5168, + "step": 20090 + }, + { + "epoch": 0.8290418420401089, + "grad_norm": 3.422648902072065, + "learning_rate": 2.2413449355696318e-07, + "loss": 0.5489, + "step": 20091 + }, + { + "epoch": 0.8290831063794669, + "grad_norm": 3.2096942251223033, + "learning_rate": 2.240290845425555e-07, + "loss": 0.539, + "step": 20092 + }, + { + "epoch": 0.8291243707188248, + "grad_norm": 3.123702222022799, + "learning_rate": 2.2392369832018055e-07, + "loss": 0.4961, + "step": 20093 + }, + { + "epoch": 0.8291656350581827, + "grad_norm": 2.1271645609989758, + "learning_rate": 2.238183348917204e-07, + "loss": 0.5356, + "step": 20094 + }, + { + "epoch": 0.8292068993975407, + "grad_norm": 3.909026303240098, + "learning_rate": 2.237129942590573e-07, + "loss": 0.5029, + "step": 20095 + }, + { + "epoch": 0.8292481637368986, + "grad_norm": 2.4760327323715106, + "learning_rate": 2.2360767642407294e-07, + "loss": 0.5367, + "step": 20096 + }, + { + "epoch": 0.8292894280762565, + "grad_norm": 3.6304783070410767, + "learning_rate": 2.235023813886488e-07, + "loss": 0.5097, + "step": 20097 + }, + { + "epoch": 0.8293306924156144, + "grad_norm": 4.726864538843433, + "learning_rate": 2.233971091546651e-07, + "loss": 0.5189, + "step": 20098 + }, + { + "epoch": 0.8293719567549723, + "grad_norm": 7.975959396894989, + "learning_rate": 2.2329185972400263e-07, + "loss": 0.5446, + "step": 20099 + }, + { + "epoch": 0.8294132210943302, + "grad_norm": 2.1777943969153593, + "learning_rate": 2.231866330985417e-07, + "loss": 0.4916, + "step": 20100 + }, + { + "epoch": 0.8294544854336882, + "grad_norm": 4.229760265949633, + "learning_rate": 
2.2308142928016118e-07, + "loss": 0.5006, + "step": 20101 + }, + { + "epoch": 0.8294957497730462, + "grad_norm": 2.3477637845045902, + "learning_rate": 2.229762482707412e-07, + "loss": 0.5068, + "step": 20102 + }, + { + "epoch": 0.8295370141124041, + "grad_norm": 3.849308429981397, + "learning_rate": 2.228710900721596e-07, + "loss": 0.5297, + "step": 20103 + }, + { + "epoch": 0.829578278451762, + "grad_norm": 1.9430796782008422, + "learning_rate": 2.2276595468629523e-07, + "loss": 0.4702, + "step": 20104 + }, + { + "epoch": 0.8296195427911199, + "grad_norm": 2.8402523880890036, + "learning_rate": 2.2266084211502606e-07, + "loss": 0.4701, + "step": 20105 + }, + { + "epoch": 0.8296608071304779, + "grad_norm": 4.121039233002267, + "learning_rate": 2.2255575236023011e-07, + "loss": 0.5177, + "step": 20106 + }, + { + "epoch": 0.8297020714698358, + "grad_norm": 5.835158004952457, + "learning_rate": 2.2245068542378356e-07, + "loss": 0.5325, + "step": 20107 + }, + { + "epoch": 0.8297433358091937, + "grad_norm": 5.407100081208236, + "learning_rate": 2.2234564130756385e-07, + "loss": 0.5217, + "step": 20108 + }, + { + "epoch": 0.8297846001485516, + "grad_norm": 4.364711977909252, + "learning_rate": 2.2224062001344753e-07, + "loss": 0.4704, + "step": 20109 + }, + { + "epoch": 0.8298258644879095, + "grad_norm": 12.357120418671695, + "learning_rate": 2.2213562154330975e-07, + "loss": 0.5498, + "step": 20110 + }, + { + "epoch": 0.8298671288272674, + "grad_norm": 2.528691449499373, + "learning_rate": 2.2203064589902666e-07, + "loss": 0.5403, + "step": 20111 + }, + { + "epoch": 0.8299083931666255, + "grad_norm": 2.899581090198533, + "learning_rate": 2.2192569308247313e-07, + "loss": 0.4692, + "step": 20112 + }, + { + "epoch": 0.8299496575059834, + "grad_norm": 3.8187774247049724, + "learning_rate": 2.218207630955243e-07, + "loss": 0.5177, + "step": 20113 + }, + { + "epoch": 0.8299909218453413, + "grad_norm": 3.755104679269372, + "learning_rate": 2.2171585594005416e-07, + "loss": 0.544, + "step": 20114 + }, + { + "epoch": 0.8300321861846992, + "grad_norm": 2.886144979890504, + "learning_rate": 2.2161097161793625e-07, + "loss": 0.5045, + "step": 20115 + }, + { + "epoch": 0.8300734505240571, + "grad_norm": 17.171561478023083, + "learning_rate": 2.2150611013104455e-07, + "loss": 0.4763, + "step": 20116 + }, + { + "epoch": 0.830114714863415, + "grad_norm": 2.422507348038355, + "learning_rate": 2.214012714812519e-07, + "loss": 0.5446, + "step": 20117 + }, + { + "epoch": 0.830155979202773, + "grad_norm": 1.7706919297318529, + "learning_rate": 2.2129645567043143e-07, + "loss": 0.525, + "step": 20118 + }, + { + "epoch": 0.8301972435421309, + "grad_norm": 2.7240118904945385, + "learning_rate": 2.2119166270045488e-07, + "loss": 0.4818, + "step": 20119 + }, + { + "epoch": 0.8302385078814888, + "grad_norm": 2.6141990896609735, + "learning_rate": 2.210868925731942e-07, + "loss": 0.457, + "step": 20120 + }, + { + "epoch": 0.8302797722208467, + "grad_norm": 2.1151351385493182, + "learning_rate": 2.209821452905211e-07, + "loss": 0.4807, + "step": 20121 + }, + { + "epoch": 0.8303210365602046, + "grad_norm": 2.8929457871834803, + "learning_rate": 2.2087742085430667e-07, + "loss": 0.5171, + "step": 20122 + }, + { + "epoch": 0.8303623008995626, + "grad_norm": 2.2645758823154543, + "learning_rate": 2.20772719266421e-07, + "loss": 0.5508, + "step": 20123 + }, + { + "epoch": 0.8304035652389206, + "grad_norm": 2.4760847998157325, + "learning_rate": 2.206680405287349e-07, + "loss": 0.5362, + "step": 20124 + }, + { + "epoch": 
0.8304448295782785, + "grad_norm": 3.579579070685999, + "learning_rate": 2.2056338464311787e-07, + "loss": 0.5349, + "step": 20125 + }, + { + "epoch": 0.8304860939176364, + "grad_norm": 2.91833984606964, + "learning_rate": 2.2045875161144023e-07, + "loss": 0.4932, + "step": 20126 + }, + { + "epoch": 0.8305273582569943, + "grad_norm": 2.1068908288864563, + "learning_rate": 2.2035414143556954e-07, + "loss": 0.4698, + "step": 20127 + }, + { + "epoch": 0.8305686225963522, + "grad_norm": 2.6451327441744117, + "learning_rate": 2.2024955411737492e-07, + "loss": 0.5149, + "step": 20128 + }, + { + "epoch": 0.8306098869357101, + "grad_norm": 2.611838072326511, + "learning_rate": 2.2014498965872472e-07, + "loss": 0.5042, + "step": 20129 + }, + { + "epoch": 0.8306511512750681, + "grad_norm": 6.374680209417159, + "learning_rate": 2.2004044806148727e-07, + "loss": 0.4891, + "step": 20130 + }, + { + "epoch": 0.830692415614426, + "grad_norm": 2.999364223715355, + "learning_rate": 2.1993592932752877e-07, + "loss": 0.5621, + "step": 20131 + }, + { + "epoch": 0.8307336799537839, + "grad_norm": 1.99949496312323, + "learning_rate": 2.1983143345871703e-07, + "loss": 0.4861, + "step": 20132 + }, + { + "epoch": 0.8307749442931419, + "grad_norm": 2.3918481413861628, + "learning_rate": 2.1972696045691825e-07, + "loss": 0.4375, + "step": 20133 + }, + { + "epoch": 0.8308162086324998, + "grad_norm": 3.0400762747063386, + "learning_rate": 2.196225103239991e-07, + "loss": 0.4965, + "step": 20134 + }, + { + "epoch": 0.8308574729718577, + "grad_norm": 2.9934275427003674, + "learning_rate": 2.195180830618244e-07, + "loss": 0.53, + "step": 20135 + }, + { + "epoch": 0.8308987373112157, + "grad_norm": 2.2335065899081195, + "learning_rate": 2.194136786722602e-07, + "loss": 0.4517, + "step": 20136 + }, + { + "epoch": 0.8309400016505736, + "grad_norm": 15.092408174586703, + "learning_rate": 2.193092971571713e-07, + "loss": 0.5443, + "step": 20137 + }, + { + "epoch": 0.8309812659899315, + "grad_norm": 7.929016346241819, + "learning_rate": 2.1920493851842238e-07, + "loss": 0.5303, + "step": 20138 + }, + { + "epoch": 0.8310225303292894, + "grad_norm": 2.365628218449513, + "learning_rate": 2.1910060275787729e-07, + "loss": 0.5433, + "step": 20139 + }, + { + "epoch": 0.8310637946686473, + "grad_norm": 3.134092002796441, + "learning_rate": 2.1899628987739956e-07, + "loss": 0.4971, + "step": 20140 + }, + { + "epoch": 0.8311050590080052, + "grad_norm": 2.980541460959433, + "learning_rate": 2.188919998788525e-07, + "loss": 0.5359, + "step": 20141 + }, + { + "epoch": 0.8311463233473632, + "grad_norm": 3.5778818514227995, + "learning_rate": 2.187877327640993e-07, + "loss": 0.5887, + "step": 20142 + }, + { + "epoch": 0.8311875876867212, + "grad_norm": 4.625925017311443, + "learning_rate": 2.1868348853500247e-07, + "loss": 0.5379, + "step": 20143 + }, + { + "epoch": 0.8312288520260791, + "grad_norm": 3.692920517004003, + "learning_rate": 2.1857926719342368e-07, + "loss": 0.5088, + "step": 20144 + }, + { + "epoch": 0.831270116365437, + "grad_norm": 2.5364353636635464, + "learning_rate": 2.184750687412248e-07, + "loss": 0.4474, + "step": 20145 + }, + { + "epoch": 0.8313113807047949, + "grad_norm": 5.048050054146591, + "learning_rate": 2.1837089318026714e-07, + "loss": 0.4792, + "step": 20146 + }, + { + "epoch": 0.8313526450441528, + "grad_norm": 3.776925809391217, + "learning_rate": 2.1826674051241175e-07, + "loss": 0.587, + "step": 20147 + }, + { + "epoch": 0.8313939093835108, + "grad_norm": 2.336081208142912, + "learning_rate": 
2.1816261073951827e-07, + "loss": 0.4909, + "step": 20148 + }, + { + "epoch": 0.8314351737228687, + "grad_norm": 4.475150356459863, + "learning_rate": 2.180585038634474e-07, + "loss": 0.4949, + "step": 20149 + }, + { + "epoch": 0.8314764380622266, + "grad_norm": 2.8986678796276686, + "learning_rate": 2.1795441988605896e-07, + "loss": 0.5488, + "step": 20150 + }, + { + "epoch": 0.8315177024015845, + "grad_norm": 3.099476391225668, + "learning_rate": 2.1785035880921135e-07, + "loss": 0.5478, + "step": 20151 + }, + { + "epoch": 0.8315589667409424, + "grad_norm": 3.1624810081628705, + "learning_rate": 2.1774632063476386e-07, + "loss": 0.5336, + "step": 20152 + }, + { + "epoch": 0.8316002310803005, + "grad_norm": 2.4462138847800046, + "learning_rate": 2.1764230536457503e-07, + "loss": 0.4852, + "step": 20153 + }, + { + "epoch": 0.8316414954196584, + "grad_norm": 3.0153862186921967, + "learning_rate": 2.175383130005022e-07, + "loss": 0.5236, + "step": 20154 + }, + { + "epoch": 0.8316827597590163, + "grad_norm": 5.862622180633788, + "learning_rate": 2.174343435444039e-07, + "loss": 0.476, + "step": 20155 + }, + { + "epoch": 0.8317240240983742, + "grad_norm": 2.308742291091948, + "learning_rate": 2.1733039699813612e-07, + "loss": 0.4738, + "step": 20156 + }, + { + "epoch": 0.8317652884377321, + "grad_norm": 1.9489884203466288, + "learning_rate": 2.172264733635564e-07, + "loss": 0.5, + "step": 20157 + }, + { + "epoch": 0.83180655277709, + "grad_norm": 1.9411840575246062, + "learning_rate": 2.1712257264252072e-07, + "loss": 0.4943, + "step": 20158 + }, + { + "epoch": 0.831847817116448, + "grad_norm": 4.852620544651867, + "learning_rate": 2.1701869483688546e-07, + "loss": 0.4877, + "step": 20159 + }, + { + "epoch": 0.8318890814558059, + "grad_norm": 3.7520469563084924, + "learning_rate": 2.1691483994850565e-07, + "loss": 0.5141, + "step": 20160 + }, + { + "epoch": 0.8319303457951638, + "grad_norm": 13.637003158884246, + "learning_rate": 2.1681100797923657e-07, + "loss": 0.5788, + "step": 20161 + }, + { + "epoch": 0.8319716101345217, + "grad_norm": 3.9880941764863307, + "learning_rate": 2.1670719893093282e-07, + "loss": 0.5138, + "step": 20162 + }, + { + "epoch": 0.8320128744738797, + "grad_norm": 2.254258911647497, + "learning_rate": 2.166034128054492e-07, + "loss": 0.5251, + "step": 20163 + }, + { + "epoch": 0.8320541388132376, + "grad_norm": 4.912826525364448, + "learning_rate": 2.1649964960463892e-07, + "loss": 0.5127, + "step": 20164 + }, + { + "epoch": 0.8320954031525956, + "grad_norm": 3.586971455506881, + "learning_rate": 2.163959093303558e-07, + "loss": 0.5439, + "step": 20165 + }, + { + "epoch": 0.8321366674919535, + "grad_norm": 13.66416647021858, + "learning_rate": 2.1629219198445305e-07, + "loss": 0.5558, + "step": 20166 + }, + { + "epoch": 0.8321779318313114, + "grad_norm": 2.146622368739688, + "learning_rate": 2.16188497568783e-07, + "loss": 0.4478, + "step": 20167 + }, + { + "epoch": 0.8322191961706693, + "grad_norm": 4.933072036711261, + "learning_rate": 2.160848260851977e-07, + "loss": 0.5141, + "step": 20168 + }, + { + "epoch": 0.8322604605100272, + "grad_norm": 3.2508306869040053, + "learning_rate": 2.159811775355493e-07, + "loss": 0.5293, + "step": 20169 + }, + { + "epoch": 0.8323017248493851, + "grad_norm": 4.133710309716633, + "learning_rate": 2.1587755192168917e-07, + "loss": 0.525, + "step": 20170 + }, + { + "epoch": 0.832342989188743, + "grad_norm": 2.006646069952119, + "learning_rate": 2.1577394924546884e-07, + "loss": 0.5037, + "step": 20171 + }, + { + "epoch": 
0.832384253528101, + "grad_norm": 3.558265858867627, + "learning_rate": 2.1567036950873798e-07, + "loss": 0.4939, + "step": 20172 + }, + { + "epoch": 0.832425517867459, + "grad_norm": 3.5099553080147676, + "learning_rate": 2.155668127133471e-07, + "loss": 0.5177, + "step": 20173 + }, + { + "epoch": 0.8324667822068169, + "grad_norm": 2.813704920744583, + "learning_rate": 2.1546327886114624e-07, + "loss": 0.5278, + "step": 20174 + }, + { + "epoch": 0.8325080465461748, + "grad_norm": 2.5616536626581, + "learning_rate": 2.1535976795398493e-07, + "loss": 0.5091, + "step": 20175 + }, + { + "epoch": 0.8325493108855327, + "grad_norm": 2.872379935881169, + "learning_rate": 2.152562799937115e-07, + "loss": 0.5497, + "step": 20176 + }, + { + "epoch": 0.8325905752248907, + "grad_norm": 4.171891330142542, + "learning_rate": 2.151528149821748e-07, + "loss": 0.5052, + "step": 20177 + }, + { + "epoch": 0.8326318395642486, + "grad_norm": 4.561502596463465, + "learning_rate": 2.1504937292122306e-07, + "loss": 0.5058, + "step": 20178 + }, + { + "epoch": 0.8326731039036065, + "grad_norm": 7.126884865155291, + "learning_rate": 2.1494595381270444e-07, + "loss": 0.5545, + "step": 20179 + }, + { + "epoch": 0.8327143682429644, + "grad_norm": 2.9887022989311545, + "learning_rate": 2.1484255765846562e-07, + "loss": 0.53, + "step": 20180 + }, + { + "epoch": 0.8327556325823223, + "grad_norm": 4.568773915202852, + "learning_rate": 2.1473918446035347e-07, + "loss": 0.5623, + "step": 20181 + }, + { + "epoch": 0.8327968969216802, + "grad_norm": 2.924614916771942, + "learning_rate": 2.1463583422021467e-07, + "loss": 0.5164, + "step": 20182 + }, + { + "epoch": 0.8328381612610382, + "grad_norm": 2.5267094941607353, + "learning_rate": 2.145325069398954e-07, + "loss": 0.5011, + "step": 20183 + }, + { + "epoch": 0.8328794256003962, + "grad_norm": 2.191207534327135, + "learning_rate": 2.1442920262124156e-07, + "loss": 0.5286, + "step": 20184 + }, + { + "epoch": 0.8329206899397541, + "grad_norm": 3.831248385184424, + "learning_rate": 2.143259212660978e-07, + "loss": 0.4848, + "step": 20185 + }, + { + "epoch": 0.832961954279112, + "grad_norm": 3.633225768460091, + "learning_rate": 2.142226628763095e-07, + "loss": 0.4908, + "step": 20186 + }, + { + "epoch": 0.8330032186184699, + "grad_norm": 1.9128214332330082, + "learning_rate": 2.1411942745372114e-07, + "loss": 0.4661, + "step": 20187 + }, + { + "epoch": 0.8330444829578278, + "grad_norm": 26.13498198746726, + "learning_rate": 2.1401621500017633e-07, + "loss": 0.5007, + "step": 20188 + }, + { + "epoch": 0.8330857472971858, + "grad_norm": 4.736326211797991, + "learning_rate": 2.13913025517519e-07, + "loss": 0.4942, + "step": 20189 + }, + { + "epoch": 0.8331270116365437, + "grad_norm": 2.203971286898061, + "learning_rate": 2.1380985900759225e-07, + "loss": 0.5132, + "step": 20190 + }, + { + "epoch": 0.8331682759759016, + "grad_norm": 3.2899969177422292, + "learning_rate": 2.1370671547223942e-07, + "loss": 0.5275, + "step": 20191 + }, + { + "epoch": 0.8332095403152595, + "grad_norm": 6.713377513111096, + "learning_rate": 2.1360359491330234e-07, + "loss": 0.5333, + "step": 20192 + }, + { + "epoch": 0.8332508046546174, + "grad_norm": 3.023914168122397, + "learning_rate": 2.1350049733262288e-07, + "loss": 0.55, + "step": 20193 + }, + { + "epoch": 0.8332920689939755, + "grad_norm": 2.5575952088777067, + "learning_rate": 2.1339742273204276e-07, + "loss": 0.5212, + "step": 20194 + }, + { + "epoch": 0.8333333333333334, + "grad_norm": 6.514633411493301, + "learning_rate": 
2.1329437111340334e-07, + "loss": 0.4725, + "step": 20195 + }, + { + "epoch": 0.8333745976726913, + "grad_norm": 6.057217322821838, + "learning_rate": 2.131913424785456e-07, + "loss": 0.5736, + "step": 20196 + }, + { + "epoch": 0.8334158620120492, + "grad_norm": 4.444997611844072, + "learning_rate": 2.130883368293093e-07, + "loss": 0.5267, + "step": 20197 + }, + { + "epoch": 0.8334571263514071, + "grad_norm": 9.80591219341008, + "learning_rate": 2.1298535416753455e-07, + "loss": 0.5174, + "step": 20198 + }, + { + "epoch": 0.833498390690765, + "grad_norm": 13.573081759884479, + "learning_rate": 2.1288239449506115e-07, + "loss": 0.4731, + "step": 20199 + }, + { + "epoch": 0.833539655030123, + "grad_norm": 3.0512503435930403, + "learning_rate": 2.127794578137282e-07, + "loss": 0.5218, + "step": 20200 + }, + { + "epoch": 0.8335809193694809, + "grad_norm": 3.0614738006775357, + "learning_rate": 2.1267654412537412e-07, + "loss": 0.5111, + "step": 20201 + }, + { + "epoch": 0.8336221837088388, + "grad_norm": 3.2045811686398253, + "learning_rate": 2.1257365343183727e-07, + "loss": 0.4904, + "step": 20202 + }, + { + "epoch": 0.8336634480481967, + "grad_norm": 2.4789793251618066, + "learning_rate": 2.1247078573495565e-07, + "loss": 0.4748, + "step": 20203 + }, + { + "epoch": 0.8337047123875547, + "grad_norm": 2.764704499615424, + "learning_rate": 2.1236794103656698e-07, + "loss": 0.4806, + "step": 20204 + }, + { + "epoch": 0.8337459767269126, + "grad_norm": 2.869407168790443, + "learning_rate": 2.1226511933850812e-07, + "loss": 0.5154, + "step": 20205 + }, + { + "epoch": 0.8337872410662706, + "grad_norm": 3.942359977808918, + "learning_rate": 2.1216232064261527e-07, + "loss": 0.5371, + "step": 20206 + }, + { + "epoch": 0.8338285054056285, + "grad_norm": 3.4378863454192845, + "learning_rate": 2.1205954495072494e-07, + "loss": 0.4978, + "step": 20207 + }, + { + "epoch": 0.8338697697449864, + "grad_norm": 3.3120095280797215, + "learning_rate": 2.119567922646735e-07, + "loss": 0.5226, + "step": 20208 + }, + { + "epoch": 0.8339110340843443, + "grad_norm": 3.7730694984487454, + "learning_rate": 2.118540625862957e-07, + "loss": 0.5628, + "step": 20209 + }, + { + "epoch": 0.8339522984237022, + "grad_norm": 4.399958335984226, + "learning_rate": 2.1175135591742667e-07, + "loss": 0.5307, + "step": 20210 + }, + { + "epoch": 0.8339935627630601, + "grad_norm": 4.761398543665892, + "learning_rate": 2.1164867225990115e-07, + "loss": 0.5063, + "step": 20211 + }, + { + "epoch": 0.834034827102418, + "grad_norm": 4.449045464755765, + "learning_rate": 2.1154601161555366e-07, + "loss": 0.5522, + "step": 20212 + }, + { + "epoch": 0.834076091441776, + "grad_norm": 3.023842475400495, + "learning_rate": 2.1144337398621737e-07, + "loss": 0.4883, + "step": 20213 + }, + { + "epoch": 0.834117355781134, + "grad_norm": 13.584538136395429, + "learning_rate": 2.1134075937372572e-07, + "loss": 0.5407, + "step": 20214 + }, + { + "epoch": 0.8341586201204919, + "grad_norm": 8.864732132771369, + "learning_rate": 2.1123816777991202e-07, + "loss": 0.4954, + "step": 20215 + }, + { + "epoch": 0.8341998844598498, + "grad_norm": 2.297478588860897, + "learning_rate": 2.111355992066088e-07, + "loss": 0.5541, + "step": 20216 + }, + { + "epoch": 0.8342411487992077, + "grad_norm": 3.3248866658711944, + "learning_rate": 2.110330536556478e-07, + "loss": 0.5152, + "step": 20217 + }, + { + "epoch": 0.8342824131385657, + "grad_norm": 3.282575897365852, + "learning_rate": 2.1093053112886118e-07, + "loss": 0.5614, + "step": 20218 + }, + { + "epoch": 
0.8343236774779236, + "grad_norm": 4.3995183072292114, + "learning_rate": 2.1082803162807985e-07, + "loss": 0.4556, + "step": 20219 + }, + { + "epoch": 0.8343649418172815, + "grad_norm": 5.978540720859854, + "learning_rate": 2.107255551551348e-07, + "loss": 0.5111, + "step": 20220 + }, + { + "epoch": 0.8344062061566394, + "grad_norm": 4.722662592059161, + "learning_rate": 2.1062310171185696e-07, + "loss": 0.4939, + "step": 20221 + }, + { + "epoch": 0.8344474704959973, + "grad_norm": 1.9108955093753737, + "learning_rate": 2.1052067130007562e-07, + "loss": 0.5286, + "step": 20222 + }, + { + "epoch": 0.8344887348353552, + "grad_norm": 2.052749920830707, + "learning_rate": 2.1041826392162105e-07, + "loss": 0.4733, + "step": 20223 + }, + { + "epoch": 0.8345299991747133, + "grad_norm": 2.754947917296475, + "learning_rate": 2.1031587957832228e-07, + "loss": 0.5245, + "step": 20224 + }, + { + "epoch": 0.8345712635140712, + "grad_norm": 4.339625264065328, + "learning_rate": 2.1021351827200846e-07, + "loss": 0.5074, + "step": 20225 + }, + { + "epoch": 0.8346125278534291, + "grad_norm": 5.745931794087007, + "learning_rate": 2.1011118000450752e-07, + "loss": 0.4891, + "step": 20226 + }, + { + "epoch": 0.834653792192787, + "grad_norm": 3.023959553600562, + "learning_rate": 2.1000886477764763e-07, + "loss": 0.4911, + "step": 20227 + }, + { + "epoch": 0.8346950565321449, + "grad_norm": 3.2962047271386394, + "learning_rate": 2.0990657259325667e-07, + "loss": 0.4639, + "step": 20228 + }, + { + "epoch": 0.8347363208715028, + "grad_norm": 3.261540082062769, + "learning_rate": 2.0980430345316155e-07, + "loss": 0.6005, + "step": 20229 + }, + { + "epoch": 0.8347775852108608, + "grad_norm": 4.643631564923014, + "learning_rate": 2.0970205735918906e-07, + "loss": 0.5173, + "step": 20230 + }, + { + "epoch": 0.8348188495502187, + "grad_norm": 2.623257853940948, + "learning_rate": 2.0959983431316598e-07, + "loss": 0.5275, + "step": 20231 + }, + { + "epoch": 0.8348601138895766, + "grad_norm": 5.090835689509399, + "learning_rate": 2.0949763431691765e-07, + "loss": 0.554, + "step": 20232 + }, + { + "epoch": 0.8349013782289345, + "grad_norm": 2.136616740905241, + "learning_rate": 2.0939545737227029e-07, + "loss": 0.5097, + "step": 20233 + }, + { + "epoch": 0.8349426425682925, + "grad_norm": 2.8209258841104563, + "learning_rate": 2.0929330348104826e-07, + "loss": 0.488, + "step": 20234 + }, + { + "epoch": 0.8349839069076505, + "grad_norm": 13.23576423963552, + "learning_rate": 2.091911726450766e-07, + "loss": 0.5323, + "step": 20235 + }, + { + "epoch": 0.8350251712470084, + "grad_norm": 3.1433144506445556, + "learning_rate": 2.0908906486617986e-07, + "loss": 0.5145, + "step": 20236 + }, + { + "epoch": 0.8350664355863663, + "grad_norm": 2.406787002080091, + "learning_rate": 2.0898698014618206e-07, + "loss": 0.4938, + "step": 20237 + }, + { + "epoch": 0.8351076999257242, + "grad_norm": 2.1830683500836803, + "learning_rate": 2.0888491848690593e-07, + "loss": 0.5135, + "step": 20238 + }, + { + "epoch": 0.8351489642650821, + "grad_norm": 3.1447340900342877, + "learning_rate": 2.087828798901752e-07, + "loss": 0.5273, + "step": 20239 + }, + { + "epoch": 0.83519022860444, + "grad_norm": 3.2002360066696585, + "learning_rate": 2.0868086435781236e-07, + "loss": 0.5172, + "step": 20240 + }, + { + "epoch": 0.835231492943798, + "grad_norm": 60.462108532447466, + "learning_rate": 2.0857887189164e-07, + "loss": 0.538, + "step": 20241 + }, + { + "epoch": 0.8352727572831559, + "grad_norm": 3.3899752650397237, + "learning_rate": 
2.084769024934793e-07, + "loss": 0.5007, + "step": 20242 + }, + { + "epoch": 0.8353140216225138, + "grad_norm": 2.871303214285993, + "learning_rate": 2.0837495616515195e-07, + "loss": 0.5519, + "step": 20243 + }, + { + "epoch": 0.8353552859618717, + "grad_norm": 2.124410095624484, + "learning_rate": 2.0827303290847903e-07, + "loss": 0.527, + "step": 20244 + }, + { + "epoch": 0.8353965503012297, + "grad_norm": 6.490989194322701, + "learning_rate": 2.0817113272528193e-07, + "loss": 0.5314, + "step": 20245 + }, + { + "epoch": 0.8354378146405876, + "grad_norm": 2.56835742503269, + "learning_rate": 2.0806925561737932e-07, + "loss": 0.4709, + "step": 20246 + }, + { + "epoch": 0.8354790789799456, + "grad_norm": 5.713504950195618, + "learning_rate": 2.0796740158659162e-07, + "loss": 0.502, + "step": 20247 + }, + { + "epoch": 0.8355203433193035, + "grad_norm": 2.553000556864947, + "learning_rate": 2.0786557063473848e-07, + "loss": 0.4983, + "step": 20248 + }, + { + "epoch": 0.8355616076586614, + "grad_norm": 3.0648559129778286, + "learning_rate": 2.0776376276363884e-07, + "loss": 0.5274, + "step": 20249 + }, + { + "epoch": 0.8356028719980193, + "grad_norm": 3.593576153149129, + "learning_rate": 2.0766197797511089e-07, + "loss": 0.4533, + "step": 20250 + }, + { + "epoch": 0.8356441363373772, + "grad_norm": 2.9450217447054334, + "learning_rate": 2.0756021627097283e-07, + "loss": 0.5603, + "step": 20251 + }, + { + "epoch": 0.8356854006767351, + "grad_norm": 2.1941704144487972, + "learning_rate": 2.0745847765304272e-07, + "loss": 0.4637, + "step": 20252 + }, + { + "epoch": 0.835726665016093, + "grad_norm": 16.51366054463958, + "learning_rate": 2.0735676212313776e-07, + "loss": 0.514, + "step": 20253 + }, + { + "epoch": 0.835767929355451, + "grad_norm": 3.7441980281521716, + "learning_rate": 2.0725506968307434e-07, + "loss": 0.4577, + "step": 20254 + }, + { + "epoch": 0.835809193694809, + "grad_norm": 4.094600689530036, + "learning_rate": 2.0715340033466952e-07, + "loss": 0.5006, + "step": 20255 + }, + { + "epoch": 0.8358504580341669, + "grad_norm": 2.621777971174624, + "learning_rate": 2.0705175407973915e-07, + "loss": 0.4776, + "step": 20256 + }, + { + "epoch": 0.8358917223735248, + "grad_norm": 9.6418960602489, + "learning_rate": 2.0695013092009913e-07, + "loss": 0.5447, + "step": 20257 + }, + { + "epoch": 0.8359329867128827, + "grad_norm": 4.6411042418892015, + "learning_rate": 2.068485308575645e-07, + "loss": 0.487, + "step": 20258 + }, + { + "epoch": 0.8359742510522407, + "grad_norm": 9.034685919610714, + "learning_rate": 2.0674695389394982e-07, + "loss": 0.5055, + "step": 20259 + }, + { + "epoch": 0.8360155153915986, + "grad_norm": 3.2838844140122974, + "learning_rate": 2.066454000310698e-07, + "loss": 0.4876, + "step": 20260 + }, + { + "epoch": 0.8360567797309565, + "grad_norm": 2.3389238187940453, + "learning_rate": 2.065438692707383e-07, + "loss": 0.4998, + "step": 20261 + }, + { + "epoch": 0.8360980440703144, + "grad_norm": 2.84074433335088, + "learning_rate": 2.0644236161476942e-07, + "loss": 0.4452, + "step": 20262 + }, + { + "epoch": 0.8361393084096723, + "grad_norm": 4.651052360178712, + "learning_rate": 2.063408770649755e-07, + "loss": 0.4964, + "step": 20263 + }, + { + "epoch": 0.8361805727490302, + "grad_norm": 2.8984317199264256, + "learning_rate": 2.0623941562316994e-07, + "loss": 0.4782, + "step": 20264 + }, + { + "epoch": 0.8362218370883883, + "grad_norm": 5.839021468782948, + "learning_rate": 2.0613797729116513e-07, + "loss": 0.5735, + "step": 20265 + }, + { + "epoch": 
0.8362631014277462, + "grad_norm": 2.3202580701747744, + "learning_rate": 2.0603656207077242e-07, + "loss": 0.5041, + "step": 20266 + }, + { + "epoch": 0.8363043657671041, + "grad_norm": 2.552473888978747, + "learning_rate": 2.059351699638039e-07, + "loss": 0.5136, + "step": 20267 + }, + { + "epoch": 0.836345630106462, + "grad_norm": 2.9681914586065394, + "learning_rate": 2.0583380097207026e-07, + "loss": 0.5537, + "step": 20268 + }, + { + "epoch": 0.8363868944458199, + "grad_norm": 3.0290841825190724, + "learning_rate": 2.0573245509738292e-07, + "loss": 0.5468, + "step": 20269 + }, + { + "epoch": 0.8364281587851778, + "grad_norm": 5.15605691820264, + "learning_rate": 2.056311323415514e-07, + "loss": 0.4802, + "step": 20270 + }, + { + "epoch": 0.8364694231245358, + "grad_norm": 6.799729681944567, + "learning_rate": 2.0552983270638608e-07, + "loss": 0.4482, + "step": 20271 + }, + { + "epoch": 0.8365106874638937, + "grad_norm": 3.8231929691060698, + "learning_rate": 2.0542855619369604e-07, + "loss": 0.485, + "step": 20272 + }, + { + "epoch": 0.8365519518032516, + "grad_norm": 2.2137774288624716, + "learning_rate": 2.0532730280529032e-07, + "loss": 0.4691, + "step": 20273 + }, + { + "epoch": 0.8365932161426095, + "grad_norm": 4.324223318943269, + "learning_rate": 2.0522607254297815e-07, + "loss": 0.5235, + "step": 20274 + }, + { + "epoch": 0.8366344804819675, + "grad_norm": 3.045265159230998, + "learning_rate": 2.0512486540856708e-07, + "loss": 0.527, + "step": 20275 + }, + { + "epoch": 0.8366757448213255, + "grad_norm": 2.90782542442587, + "learning_rate": 2.0502368140386513e-07, + "loss": 0.5056, + "step": 20276 + }, + { + "epoch": 0.8367170091606834, + "grad_norm": 3.207317071968542, + "learning_rate": 2.049225205306799e-07, + "loss": 0.5096, + "step": 20277 + }, + { + "epoch": 0.8367582735000413, + "grad_norm": 2.3610784604796717, + "learning_rate": 2.048213827908184e-07, + "loss": 0.4831, + "step": 20278 + }, + { + "epoch": 0.8367995378393992, + "grad_norm": 9.447818817875133, + "learning_rate": 2.0472026818608675e-07, + "loss": 0.473, + "step": 20279 + }, + { + "epoch": 0.8368408021787571, + "grad_norm": 1.9009407524532427, + "learning_rate": 2.0461917671829145e-07, + "loss": 0.5026, + "step": 20280 + }, + { + "epoch": 0.836882066518115, + "grad_norm": 2.4308325410492824, + "learning_rate": 2.0451810838923824e-07, + "loss": 0.522, + "step": 20281 + }, + { + "epoch": 0.836923330857473, + "grad_norm": 4.814995216368858, + "learning_rate": 2.0441706320073266e-07, + "loss": 0.5077, + "step": 20282 + }, + { + "epoch": 0.8369645951968309, + "grad_norm": 5.465700299857713, + "learning_rate": 2.0431604115457912e-07, + "loss": 0.53, + "step": 20283 + }, + { + "epoch": 0.8370058595361888, + "grad_norm": 2.3099125021213798, + "learning_rate": 2.0421504225258268e-07, + "loss": 0.4838, + "step": 20284 + }, + { + "epoch": 0.8370471238755468, + "grad_norm": 2.4036878131434984, + "learning_rate": 2.0411406649654673e-07, + "loss": 0.5124, + "step": 20285 + }, + { + "epoch": 0.8370883882149047, + "grad_norm": 50.937783073131, + "learning_rate": 2.040131138882758e-07, + "loss": 0.5452, + "step": 20286 + }, + { + "epoch": 0.8371296525542626, + "grad_norm": 2.4797104815705606, + "learning_rate": 2.0391218442957232e-07, + "loss": 0.5597, + "step": 20287 + }, + { + "epoch": 0.8371709168936206, + "grad_norm": 2.9348188078013178, + "learning_rate": 2.0381127812223964e-07, + "loss": 0.4501, + "step": 20288 + }, + { + "epoch": 0.8372121812329785, + "grad_norm": 13.043489720339384, + "learning_rate": 
2.0371039496808003e-07, + "loss": 0.4936, + "step": 20289 + }, + { + "epoch": 0.8372534455723364, + "grad_norm": 2.3628445191807708, + "learning_rate": 2.03609534968896e-07, + "loss": 0.5514, + "step": 20290 + }, + { + "epoch": 0.8372947099116943, + "grad_norm": 2.406539861401489, + "learning_rate": 2.0350869812648832e-07, + "loss": 0.5158, + "step": 20291 + }, + { + "epoch": 0.8373359742510522, + "grad_norm": 2.328277208087382, + "learning_rate": 2.034078844426585e-07, + "loss": 0.5551, + "step": 20292 + }, + { + "epoch": 0.8373772385904101, + "grad_norm": 2.910975317106971, + "learning_rate": 2.0330709391920765e-07, + "loss": 0.5148, + "step": 20293 + }, + { + "epoch": 0.837418502929768, + "grad_norm": 5.101368722485335, + "learning_rate": 2.0320632655793615e-07, + "loss": 0.4714, + "step": 20294 + }, + { + "epoch": 0.8374597672691261, + "grad_norm": 2.938524776297409, + "learning_rate": 2.0310558236064353e-07, + "loss": 0.466, + "step": 20295 + }, + { + "epoch": 0.837501031608484, + "grad_norm": 2.3442731358240314, + "learning_rate": 2.0300486132912938e-07, + "loss": 0.4837, + "step": 20296 + }, + { + "epoch": 0.8375422959478419, + "grad_norm": 4.202125783195485, + "learning_rate": 2.0290416346519342e-07, + "loss": 0.5087, + "step": 20297 + }, + { + "epoch": 0.8375835602871998, + "grad_norm": 6.424659214366389, + "learning_rate": 2.0280348877063372e-07, + "loss": 0.5532, + "step": 20298 + }, + { + "epoch": 0.8376248246265577, + "grad_norm": 13.224140422401671, + "learning_rate": 2.0270283724724897e-07, + "loss": 0.5403, + "step": 20299 + }, + { + "epoch": 0.8376660889659157, + "grad_norm": 2.4951508089811254, + "learning_rate": 2.0260220889683662e-07, + "loss": 0.5463, + "step": 20300 + }, + { + "epoch": 0.8377073533052736, + "grad_norm": 2.9920270091567343, + "learning_rate": 2.0250160372119436e-07, + "loss": 0.4541, + "step": 20301 + }, + { + "epoch": 0.8377486176446315, + "grad_norm": 5.72527141943156, + "learning_rate": 2.0240102172211926e-07, + "loss": 0.5548, + "step": 20302 + }, + { + "epoch": 0.8377898819839894, + "grad_norm": 5.498935229422182, + "learning_rate": 2.023004629014084e-07, + "loss": 0.543, + "step": 20303 + }, + { + "epoch": 0.8378311463233473, + "grad_norm": 2.809488779200622, + "learning_rate": 2.0219992726085735e-07, + "loss": 0.53, + "step": 20304 + }, + { + "epoch": 0.8378724106627052, + "grad_norm": 5.4840584701292325, + "learning_rate": 2.0209941480226213e-07, + "loss": 0.487, + "step": 20305 + }, + { + "epoch": 0.8379136750020633, + "grad_norm": 3.1185805568763882, + "learning_rate": 2.019989255274185e-07, + "loss": 0.5187, + "step": 20306 + }, + { + "epoch": 0.8379549393414212, + "grad_norm": 4.628356244113843, + "learning_rate": 2.0189845943812085e-07, + "loss": 0.5012, + "step": 20307 + }, + { + "epoch": 0.8379962036807791, + "grad_norm": 2.020352329939691, + "learning_rate": 2.017980165361641e-07, + "loss": 0.4619, + "step": 20308 + }, + { + "epoch": 0.838037468020137, + "grad_norm": 4.070431597146757, + "learning_rate": 2.0169759682334244e-07, + "loss": 0.524, + "step": 20309 + }, + { + "epoch": 0.8380787323594949, + "grad_norm": 1.8663797690602266, + "learning_rate": 2.015972003014498e-07, + "loss": 0.4673, + "step": 20310 + }, + { + "epoch": 0.8381199966988528, + "grad_norm": 2.962711292538272, + "learning_rate": 2.0149682697227923e-07, + "loss": 0.5114, + "step": 20311 + }, + { + "epoch": 0.8381612610382108, + "grad_norm": 2.4707234905928357, + "learning_rate": 2.0139647683762346e-07, + "loss": 0.5091, + "step": 20312 + }, + { + "epoch": 
0.8382025253775687, + "grad_norm": 5.209076841035197, + "learning_rate": 2.0129614989927526e-07, + "loss": 0.5273, + "step": 20313 + }, + { + "epoch": 0.8382437897169266, + "grad_norm": 3.3084824703642854, + "learning_rate": 2.0119584615902649e-07, + "loss": 0.4878, + "step": 20314 + }, + { + "epoch": 0.8382850540562845, + "grad_norm": 2.145363081257611, + "learning_rate": 2.0109556561866953e-07, + "loss": 0.5176, + "step": 20315 + }, + { + "epoch": 0.8383263183956425, + "grad_norm": 2.440546663592653, + "learning_rate": 2.009953082799947e-07, + "loss": 0.4816, + "step": 20316 + }, + { + "epoch": 0.8383675827350004, + "grad_norm": 2.443850282416936, + "learning_rate": 2.008950741447933e-07, + "loss": 0.4681, + "step": 20317 + }, + { + "epoch": 0.8384088470743584, + "grad_norm": 2.8661553276271916, + "learning_rate": 2.007948632148558e-07, + "loss": 0.4971, + "step": 20318 + }, + { + "epoch": 0.8384501114137163, + "grad_norm": 2.841033591522401, + "learning_rate": 2.006946754919724e-07, + "loss": 0.5158, + "step": 20319 + }, + { + "epoch": 0.8384913757530742, + "grad_norm": 15.907770056371657, + "learning_rate": 2.005945109779322e-07, + "loss": 0.5544, + "step": 20320 + }, + { + "epoch": 0.8385326400924321, + "grad_norm": 4.1287761874624325, + "learning_rate": 2.0049436967452456e-07, + "loss": 0.4414, + "step": 20321 + }, + { + "epoch": 0.83857390443179, + "grad_norm": 2.3743767565326976, + "learning_rate": 2.003942515835384e-07, + "loss": 0.4337, + "step": 20322 + }, + { + "epoch": 0.838615168771148, + "grad_norm": 5.730035397079574, + "learning_rate": 2.0029415670676267e-07, + "loss": 0.4869, + "step": 20323 + }, + { + "epoch": 0.8386564331105059, + "grad_norm": 2.7107914571337153, + "learning_rate": 2.0019408504598403e-07, + "loss": 0.5255, + "step": 20324 + }, + { + "epoch": 0.8386976974498638, + "grad_norm": 4.559431182868116, + "learning_rate": 2.0009403660299057e-07, + "loss": 0.4905, + "step": 20325 + }, + { + "epoch": 0.8387389617892218, + "grad_norm": 3.113312442701939, + "learning_rate": 1.9999401137956957e-07, + "loss": 0.5332, + "step": 20326 + }, + { + "epoch": 0.8387802261285797, + "grad_norm": 2.757952347616869, + "learning_rate": 1.998940093775079e-07, + "loss": 0.4792, + "step": 20327 + }, + { + "epoch": 0.8388214904679376, + "grad_norm": 2.829180209076386, + "learning_rate": 1.997940305985913e-07, + "loss": 0.5849, + "step": 20328 + }, + { + "epoch": 0.8388627548072956, + "grad_norm": 9.187754260002569, + "learning_rate": 1.9969407504460602e-07, + "loss": 0.5396, + "step": 20329 + }, + { + "epoch": 0.8389040191466535, + "grad_norm": 2.6314154164780343, + "learning_rate": 1.995941427173373e-07, + "loss": 0.5074, + "step": 20330 + }, + { + "epoch": 0.8389452834860114, + "grad_norm": 3.8717200684456268, + "learning_rate": 1.9949423361857066e-07, + "loss": 0.5346, + "step": 20331 + }, + { + "epoch": 0.8389865478253693, + "grad_norm": 10.2614678414699, + "learning_rate": 1.9939434775009009e-07, + "loss": 0.5137, + "step": 20332 + }, + { + "epoch": 0.8390278121647272, + "grad_norm": 2.464903383155536, + "learning_rate": 1.992944851136801e-07, + "loss": 0.5361, + "step": 20333 + }, + { + "epoch": 0.8390690765040851, + "grad_norm": 4.1336190138825595, + "learning_rate": 1.9919464571112443e-07, + "loss": 0.4605, + "step": 20334 + }, + { + "epoch": 0.839110340843443, + "grad_norm": 2.860681511537381, + "learning_rate": 1.9909482954420688e-07, + "loss": 0.5201, + "step": 20335 + }, + { + "epoch": 0.8391516051828011, + "grad_norm": 4.079550592921806, + "learning_rate": 
1.9899503661470964e-07, + "loss": 0.5476, + "step": 20336 + }, + { + "epoch": 0.839192869522159, + "grad_norm": 2.9269641072623602, + "learning_rate": 1.9889526692441597e-07, + "loss": 0.4785, + "step": 20337 + }, + { + "epoch": 0.8392341338615169, + "grad_norm": 2.8167044426963064, + "learning_rate": 1.987955204751074e-07, + "loss": 0.5396, + "step": 20338 + }, + { + "epoch": 0.8392753982008748, + "grad_norm": 3.155281195155937, + "learning_rate": 1.9869579726856606e-07, + "loss": 0.4852, + "step": 20339 + }, + { + "epoch": 0.8393166625402327, + "grad_norm": 3.664071809443011, + "learning_rate": 1.9859609730657335e-07, + "loss": 0.522, + "step": 20340 + }, + { + "epoch": 0.8393579268795907, + "grad_norm": 3.19611344918565, + "learning_rate": 1.9849642059090966e-07, + "loss": 0.4418, + "step": 20341 + }, + { + "epoch": 0.8393991912189486, + "grad_norm": 3.126205524965018, + "learning_rate": 1.9839676712335574e-07, + "loss": 0.5384, + "step": 20342 + }, + { + "epoch": 0.8394404555583065, + "grad_norm": 2.3262381542641544, + "learning_rate": 1.98297136905692e-07, + "loss": 0.5189, + "step": 20343 + }, + { + "epoch": 0.8394817198976644, + "grad_norm": 2.16953796231838, + "learning_rate": 1.9819752993969753e-07, + "loss": 0.5343, + "step": 20344 + }, + { + "epoch": 0.8395229842370223, + "grad_norm": 3.2157488303451878, + "learning_rate": 1.980979462271517e-07, + "loss": 0.5518, + "step": 20345 + }, + { + "epoch": 0.8395642485763803, + "grad_norm": 4.884171887846891, + "learning_rate": 1.9799838576983332e-07, + "loss": 0.4282, + "step": 20346 + }, + { + "epoch": 0.8396055129157383, + "grad_norm": 4.170354820145887, + "learning_rate": 1.978988485695214e-07, + "loss": 0.5248, + "step": 20347 + }, + { + "epoch": 0.8396467772550962, + "grad_norm": 4.91676453799839, + "learning_rate": 1.9779933462799288e-07, + "loss": 0.5587, + "step": 20348 + }, + { + "epoch": 0.8396880415944541, + "grad_norm": 2.3386642292684354, + "learning_rate": 1.97699843947026e-07, + "loss": 0.4502, + "step": 20349 + }, + { + "epoch": 0.839729305933812, + "grad_norm": 2.1152453509237397, + "learning_rate": 1.97600376528398e-07, + "loss": 0.5016, + "step": 20350 + }, + { + "epoch": 0.8397705702731699, + "grad_norm": 5.53335908061352, + "learning_rate": 1.9750093237388517e-07, + "loss": 0.5062, + "step": 20351 + }, + { + "epoch": 0.8398118346125278, + "grad_norm": 4.911010516792527, + "learning_rate": 1.974015114852642e-07, + "loss": 0.5177, + "step": 20352 + }, + { + "epoch": 0.8398530989518858, + "grad_norm": 2.1461273045450096, + "learning_rate": 1.973021138643107e-07, + "loss": 0.4973, + "step": 20353 + }, + { + "epoch": 0.8398943632912437, + "grad_norm": 2.926471264636035, + "learning_rate": 1.972027395128002e-07, + "loss": 0.518, + "step": 20354 + }, + { + "epoch": 0.8399356276306016, + "grad_norm": 4.12766304378463, + "learning_rate": 1.9710338843250785e-07, + "loss": 0.4832, + "step": 20355 + }, + { + "epoch": 0.8399768919699596, + "grad_norm": 5.7484979335480615, + "learning_rate": 1.970040606252087e-07, + "loss": 0.5266, + "step": 20356 + }, + { + "epoch": 0.8400181563093175, + "grad_norm": 2.753837619585701, + "learning_rate": 1.9690475609267617e-07, + "loss": 0.5349, + "step": 20357 + }, + { + "epoch": 0.8400594206486754, + "grad_norm": 3.4153946120429675, + "learning_rate": 1.9680547483668466e-07, + "loss": 0.4966, + "step": 20358 + }, + { + "epoch": 0.8401006849880334, + "grad_norm": 4.5100782692453025, + "learning_rate": 1.967062168590074e-07, + "loss": 0.4921, + "step": 20359 + }, + { + "epoch": 
0.8401419493273913, + "grad_norm": 3.5946365278204264, + "learning_rate": 1.9660698216141787e-07, + "loss": 0.518, + "step": 20360 + }, + { + "epoch": 0.8401832136667492, + "grad_norm": 2.3059890286874207, + "learning_rate": 1.9650777074568793e-07, + "loss": 0.4714, + "step": 20361 + }, + { + "epoch": 0.8402244780061071, + "grad_norm": 3.1911416231766085, + "learning_rate": 1.9640858261359002e-07, + "loss": 0.5036, + "step": 20362 + }, + { + "epoch": 0.840265742345465, + "grad_norm": 2.440240676785493, + "learning_rate": 1.9630941776689603e-07, + "loss": 0.4944, + "step": 20363 + }, + { + "epoch": 0.8403070066848229, + "grad_norm": 2.8048865352571632, + "learning_rate": 1.962102762073774e-07, + "loss": 0.4899, + "step": 20364 + }, + { + "epoch": 0.8403482710241809, + "grad_norm": 2.2292104906709143, + "learning_rate": 1.961111579368044e-07, + "loss": 0.4709, + "step": 20365 + }, + { + "epoch": 0.8403895353635388, + "grad_norm": 4.207860158251246, + "learning_rate": 1.9601206295694808e-07, + "loss": 0.4393, + "step": 20366 + }, + { + "epoch": 0.8404307997028968, + "grad_norm": 1.6138659662957249, + "learning_rate": 1.9591299126957817e-07, + "loss": 0.4927, + "step": 20367 + }, + { + "epoch": 0.8404720640422547, + "grad_norm": 2.8967122742799183, + "learning_rate": 1.9581394287646497e-07, + "loss": 0.548, + "step": 20368 + }, + { + "epoch": 0.8405133283816126, + "grad_norm": 1.9891421278888324, + "learning_rate": 1.9571491777937706e-07, + "loss": 0.4892, + "step": 20369 + }, + { + "epoch": 0.8405545927209706, + "grad_norm": 3.5108167140433966, + "learning_rate": 1.9561591598008349e-07, + "loss": 0.4266, + "step": 20370 + }, + { + "epoch": 0.8405958570603285, + "grad_norm": 8.425024528633008, + "learning_rate": 1.955169374803529e-07, + "loss": 0.5487, + "step": 20371 + }, + { + "epoch": 0.8406371213996864, + "grad_norm": 3.1932875385076764, + "learning_rate": 1.9541798228195328e-07, + "loss": 0.581, + "step": 20372 + }, + { + "epoch": 0.8406783857390443, + "grad_norm": 2.0544557806418378, + "learning_rate": 1.9531905038665167e-07, + "loss": 0.5023, + "step": 20373 + }, + { + "epoch": 0.8407196500784022, + "grad_norm": 5.482472194624295, + "learning_rate": 1.9522014179621571e-07, + "loss": 0.5073, + "step": 20374 + }, + { + "epoch": 0.8407609144177601, + "grad_norm": 3.2058444759428157, + "learning_rate": 1.9512125651241207e-07, + "loss": 0.5501, + "step": 20375 + }, + { + "epoch": 0.840802178757118, + "grad_norm": 4.1792516305265, + "learning_rate": 1.950223945370075e-07, + "loss": 0.5504, + "step": 20376 + }, + { + "epoch": 0.8408434430964761, + "grad_norm": 2.9714598565361894, + "learning_rate": 1.9492355587176735e-07, + "loss": 0.5406, + "step": 20377 + }, + { + "epoch": 0.840884707435834, + "grad_norm": 8.120093633283211, + "learning_rate": 1.9482474051845712e-07, + "loss": 0.5173, + "step": 20378 + }, + { + "epoch": 0.8409259717751919, + "grad_norm": 1.9937560506064886, + "learning_rate": 1.94725948478842e-07, + "loss": 0.5083, + "step": 20379 + }, + { + "epoch": 0.8409672361145498, + "grad_norm": 2.885389090623113, + "learning_rate": 1.946271797546868e-07, + "loss": 0.4987, + "step": 20380 + }, + { + "epoch": 0.8410085004539077, + "grad_norm": 2.5540935695991225, + "learning_rate": 1.9452843434775607e-07, + "loss": 0.4726, + "step": 20381 + }, + { + "epoch": 0.8410497647932657, + "grad_norm": 1.9676528203638384, + "learning_rate": 1.9442971225981276e-07, + "loss": 0.5312, + "step": 20382 + }, + { + "epoch": 0.8410910291326236, + "grad_norm": 3.5550613412085887, + "learning_rate": 
1.9433101349262112e-07, + "loss": 0.4976, + "step": 20383 + }, + { + "epoch": 0.8411322934719815, + "grad_norm": 2.2606432494104296, + "learning_rate": 1.942323380479439e-07, + "loss": 0.5368, + "step": 20384 + }, + { + "epoch": 0.8411735578113394, + "grad_norm": 5.501060119276021, + "learning_rate": 1.941336859275435e-07, + "loss": 0.4965, + "step": 20385 + }, + { + "epoch": 0.8412148221506973, + "grad_norm": 34.05559773910058, + "learning_rate": 1.940350571331822e-07, + "loss": 0.5041, + "step": 20386 + }, + { + "epoch": 0.8412560864900553, + "grad_norm": 2.5383049706893215, + "learning_rate": 1.9393645166662178e-07, + "loss": 0.5439, + "step": 20387 + }, + { + "epoch": 0.8412973508294133, + "grad_norm": 3.200923364167027, + "learning_rate": 1.9383786952962397e-07, + "loss": 0.5312, + "step": 20388 + }, + { + "epoch": 0.8413386151687712, + "grad_norm": 4.61787297213585, + "learning_rate": 1.9373931072394934e-07, + "loss": 0.5028, + "step": 20389 + }, + { + "epoch": 0.8413798795081291, + "grad_norm": 3.843214515508204, + "learning_rate": 1.9364077525135804e-07, + "loss": 0.5088, + "step": 20390 + }, + { + "epoch": 0.841421143847487, + "grad_norm": 3.473285208568791, + "learning_rate": 1.9354226311361044e-07, + "loss": 0.5357, + "step": 20391 + }, + { + "epoch": 0.8414624081868449, + "grad_norm": 3.156171807529627, + "learning_rate": 1.9344377431246635e-07, + "loss": 0.5515, + "step": 20392 + }, + { + "epoch": 0.8415036725262028, + "grad_norm": 2.2969888162894176, + "learning_rate": 1.9334530884968532e-07, + "loss": 0.5091, + "step": 20393 + }, + { + "epoch": 0.8415449368655608, + "grad_norm": 2.32547240594129, + "learning_rate": 1.932468667270253e-07, + "loss": 0.5367, + "step": 20394 + }, + { + "epoch": 0.8415862012049187, + "grad_norm": 5.366557983071721, + "learning_rate": 1.9314844794624553e-07, + "loss": 0.4858, + "step": 20395 + }, + { + "epoch": 0.8416274655442766, + "grad_norm": 5.204409797706792, + "learning_rate": 1.9305005250910346e-07, + "loss": 0.5079, + "step": 20396 + }, + { + "epoch": 0.8416687298836346, + "grad_norm": 2.356360282182992, + "learning_rate": 1.9295168041735732e-07, + "loss": 0.472, + "step": 20397 + }, + { + "epoch": 0.8417099942229925, + "grad_norm": 3.3135183686873835, + "learning_rate": 1.9285333167276375e-07, + "loss": 0.5245, + "step": 20398 + }, + { + "epoch": 0.8417512585623504, + "grad_norm": 4.6678831936662855, + "learning_rate": 1.927550062770795e-07, + "loss": 0.5698, + "step": 20399 + }, + { + "epoch": 0.8417925229017084, + "grad_norm": 7.815418720348727, + "learning_rate": 1.926567042320611e-07, + "loss": 0.533, + "step": 20400 + }, + { + "epoch": 0.8418337872410663, + "grad_norm": 3.081418103620707, + "learning_rate": 1.9255842553946473e-07, + "loss": 0.5401, + "step": 20401 + }, + { + "epoch": 0.8418750515804242, + "grad_norm": 3.666638386874532, + "learning_rate": 1.9246017020104562e-07, + "loss": 0.5284, + "step": 20402 + }, + { + "epoch": 0.8419163159197821, + "grad_norm": 2.686435481786467, + "learning_rate": 1.923619382185585e-07, + "loss": 0.5535, + "step": 20403 + }, + { + "epoch": 0.84195758025914, + "grad_norm": 2.6559065180088313, + "learning_rate": 1.9226372959375837e-07, + "loss": 0.4814, + "step": 20404 + }, + { + "epoch": 0.8419988445984979, + "grad_norm": 3.757035928825575, + "learning_rate": 1.9216554432839996e-07, + "loss": 0.5315, + "step": 20405 + }, + { + "epoch": 0.8420401089378559, + "grad_norm": 4.634268694783991, + "learning_rate": 1.9206738242423617e-07, + "loss": 0.5, + "step": 20406 + }, + { + "epoch": 
0.8420813732772139, + "grad_norm": 2.686874087049267, + "learning_rate": 1.9196924388302078e-07, + "loss": 0.4906, + "step": 20407 + }, + { + "epoch": 0.8421226376165718, + "grad_norm": 3.8359718097562077, + "learning_rate": 1.9187112870650692e-07, + "loss": 0.4962, + "step": 20408 + }, + { + "epoch": 0.8421639019559297, + "grad_norm": 4.121643140371848, + "learning_rate": 1.9177303689644766e-07, + "loss": 0.4843, + "step": 20409 + }, + { + "epoch": 0.8422051662952876, + "grad_norm": 3.4161810076716392, + "learning_rate": 1.916749684545941e-07, + "loss": 0.4975, + "step": 20410 + }, + { + "epoch": 0.8422464306346455, + "grad_norm": 2.692784210253676, + "learning_rate": 1.9157692338269868e-07, + "loss": 0.4887, + "step": 20411 + }, + { + "epoch": 0.8422876949740035, + "grad_norm": 2.533111169343388, + "learning_rate": 1.9147890168251247e-07, + "loss": 0.4962, + "step": 20412 + }, + { + "epoch": 0.8423289593133614, + "grad_norm": 6.6097767421387275, + "learning_rate": 1.9138090335578696e-07, + "loss": 0.4921, + "step": 20413 + }, + { + "epoch": 0.8423702236527193, + "grad_norm": 5.988939199758748, + "learning_rate": 1.9128292840427168e-07, + "loss": 0.5253, + "step": 20414 + }, + { + "epoch": 0.8424114879920772, + "grad_norm": 3.2373992847443147, + "learning_rate": 1.9118497682971763e-07, + "loss": 0.5185, + "step": 20415 + }, + { + "epoch": 0.8424527523314351, + "grad_norm": 4.0287246516721105, + "learning_rate": 1.9108704863387373e-07, + "loss": 0.4679, + "step": 20416 + }, + { + "epoch": 0.8424940166707932, + "grad_norm": 2.803455602945097, + "learning_rate": 1.9098914381848953e-07, + "loss": 0.5712, + "step": 20417 + }, + { + "epoch": 0.8425352810101511, + "grad_norm": 6.764720667841243, + "learning_rate": 1.9089126238531419e-07, + "loss": 0.5073, + "step": 20418 + }, + { + "epoch": 0.842576545349509, + "grad_norm": 5.127114815009089, + "learning_rate": 1.9079340433609544e-07, + "loss": 0.5379, + "step": 20419 + }, + { + "epoch": 0.8426178096888669, + "grad_norm": 8.474589822517519, + "learning_rate": 1.9069556967258156e-07, + "loss": 0.5084, + "step": 20420 + }, + { + "epoch": 0.8426590740282248, + "grad_norm": 4.639212083084299, + "learning_rate": 1.9059775839652065e-07, + "loss": 0.497, + "step": 20421 + }, + { + "epoch": 0.8427003383675827, + "grad_norm": 4.100082882761806, + "learning_rate": 1.90499970509659e-07, + "loss": 0.4895, + "step": 20422 + }, + { + "epoch": 0.8427416027069407, + "grad_norm": 3.7185959259348413, + "learning_rate": 1.9040220601374368e-07, + "loss": 0.4714, + "step": 20423 + }, + { + "epoch": 0.8427828670462986, + "grad_norm": 3.424363224872445, + "learning_rate": 1.9030446491052116e-07, + "loss": 0.5025, + "step": 20424 + }, + { + "epoch": 0.8428241313856565, + "grad_norm": 16.618304866433117, + "learning_rate": 1.9020674720173753e-07, + "loss": 0.5383, + "step": 20425 + }, + { + "epoch": 0.8428653957250144, + "grad_norm": 4.136959834058182, + "learning_rate": 1.901090528891377e-07, + "loss": 0.5595, + "step": 20426 + }, + { + "epoch": 0.8429066600643723, + "grad_norm": 4.115860855790015, + "learning_rate": 1.9001138197446715e-07, + "loss": 0.48, + "step": 20427 + }, + { + "epoch": 0.8429479244037303, + "grad_norm": 3.703132668067106, + "learning_rate": 1.899137344594703e-07, + "loss": 0.5323, + "step": 20428 + }, + { + "epoch": 0.8429891887430883, + "grad_norm": 2.5651066306406007, + "learning_rate": 1.8981611034589174e-07, + "loss": 0.5092, + "step": 20429 + }, + { + "epoch": 0.8430304530824462, + "grad_norm": 8.17436129051446, + "learning_rate": 
1.897185096354751e-07, + "loss": 0.475, + "step": 20430 + }, + { + "epoch": 0.8430717174218041, + "grad_norm": 7.042231424092364, + "learning_rate": 1.896209323299633e-07, + "loss": 0.4777, + "step": 20431 + }, + { + "epoch": 0.843112981761162, + "grad_norm": 4.498611374906168, + "learning_rate": 1.895233784310998e-07, + "loss": 0.4724, + "step": 20432 + }, + { + "epoch": 0.8431542461005199, + "grad_norm": 1.9597208879908565, + "learning_rate": 1.89425847940627e-07, + "loss": 0.4492, + "step": 20433 + }, + { + "epoch": 0.8431955104398778, + "grad_norm": 2.5013916821154973, + "learning_rate": 1.8932834086028756e-07, + "loss": 0.5492, + "step": 20434 + }, + { + "epoch": 0.8432367747792358, + "grad_norm": 3.417700718940169, + "learning_rate": 1.8923085719182238e-07, + "loss": 0.522, + "step": 20435 + }, + { + "epoch": 0.8432780391185937, + "grad_norm": 5.652991411338954, + "learning_rate": 1.8913339693697306e-07, + "loss": 0.5123, + "step": 20436 + }, + { + "epoch": 0.8433193034579516, + "grad_norm": 2.961860063015584, + "learning_rate": 1.890359600974806e-07, + "loss": 0.513, + "step": 20437 + }, + { + "epoch": 0.8433605677973096, + "grad_norm": 3.0453119725209925, + "learning_rate": 1.889385466750857e-07, + "loss": 0.5208, + "step": 20438 + }, + { + "epoch": 0.8434018321366675, + "grad_norm": 10.0668799312141, + "learning_rate": 1.8884115667152785e-07, + "loss": 0.528, + "step": 20439 + }, + { + "epoch": 0.8434430964760254, + "grad_norm": 8.574786862761234, + "learning_rate": 1.8874379008854696e-07, + "loss": 0.5445, + "step": 20440 + }, + { + "epoch": 0.8434843608153834, + "grad_norm": 13.462684034474163, + "learning_rate": 1.8864644692788236e-07, + "loss": 0.5037, + "step": 20441 + }, + { + "epoch": 0.8435256251547413, + "grad_norm": 3.7610611414111994, + "learning_rate": 1.8854912719127326e-07, + "loss": 0.5233, + "step": 20442 + }, + { + "epoch": 0.8435668894940992, + "grad_norm": 3.3144517462207053, + "learning_rate": 1.884518308804568e-07, + "loss": 0.5114, + "step": 20443 + }, + { + "epoch": 0.8436081538334571, + "grad_norm": 2.2706623779130135, + "learning_rate": 1.883545579971716e-07, + "loss": 0.5355, + "step": 20444 + }, + { + "epoch": 0.843649418172815, + "grad_norm": 3.8071579764887553, + "learning_rate": 1.8825730854315527e-07, + "loss": 0.5665, + "step": 20445 + }, + { + "epoch": 0.8436906825121729, + "grad_norm": 6.823829088926387, + "learning_rate": 1.8816008252014538e-07, + "loss": 0.5374, + "step": 20446 + }, + { + "epoch": 0.8437319468515309, + "grad_norm": 8.276419765468992, + "learning_rate": 1.8806287992987758e-07, + "loss": 0.5201, + "step": 20447 + }, + { + "epoch": 0.8437732111908889, + "grad_norm": 4.640559425221512, + "learning_rate": 1.879657007740888e-07, + "loss": 0.5101, + "step": 20448 + }, + { + "epoch": 0.8438144755302468, + "grad_norm": 2.361875305893946, + "learning_rate": 1.8786854505451485e-07, + "loss": 0.5181, + "step": 20449 + }, + { + "epoch": 0.8438557398696047, + "grad_norm": 2.30750302645576, + "learning_rate": 1.8777141277289145e-07, + "loss": 0.5284, + "step": 20450 + }, + { + "epoch": 0.8438970042089626, + "grad_norm": 3.249568507671836, + "learning_rate": 1.8767430393095293e-07, + "loss": 0.4925, + "step": 20451 + }, + { + "epoch": 0.8439382685483205, + "grad_norm": 2.310314130821703, + "learning_rate": 1.8757721853043435e-07, + "loss": 0.5497, + "step": 20452 + }, + { + "epoch": 0.8439795328876785, + "grad_norm": 3.278011866056551, + "learning_rate": 1.8748015657306971e-07, + "loss": 0.4884, + "step": 20453 + }, + { + "epoch": 
0.8440207972270364, + "grad_norm": 4.922083231124395, + "learning_rate": 1.8738311806059328e-07, + "loss": 0.5232, + "step": 20454 + }, + { + "epoch": 0.8440620615663943, + "grad_norm": 3.664992801292193, + "learning_rate": 1.8728610299473813e-07, + "loss": 0.5053, + "step": 20455 + }, + { + "epoch": 0.8441033259057522, + "grad_norm": 34.25646998815487, + "learning_rate": 1.871891113772366e-07, + "loss": 0.5549, + "step": 20456 + }, + { + "epoch": 0.8441445902451101, + "grad_norm": 2.113168613851604, + "learning_rate": 1.8709214320982177e-07, + "loss": 0.4738, + "step": 20457 + }, + { + "epoch": 0.8441858545844682, + "grad_norm": 2.8026624396172344, + "learning_rate": 1.8699519849422558e-07, + "loss": 0.4885, + "step": 20458 + }, + { + "epoch": 0.8442271189238261, + "grad_norm": 2.460141885152132, + "learning_rate": 1.8689827723218016e-07, + "loss": 0.499, + "step": 20459 + }, + { + "epoch": 0.844268383263184, + "grad_norm": 3.2128816831982587, + "learning_rate": 1.8680137942541613e-07, + "loss": 0.5075, + "step": 20460 + }, + { + "epoch": 0.8443096476025419, + "grad_norm": 3.0277300258693853, + "learning_rate": 1.8670450507566461e-07, + "loss": 0.5659, + "step": 20461 + }, + { + "epoch": 0.8443509119418998, + "grad_norm": 3.4179137842457177, + "learning_rate": 1.866076541846562e-07, + "loss": 0.5175, + "step": 20462 + }, + { + "epoch": 0.8443921762812577, + "grad_norm": 4.143328801530997, + "learning_rate": 1.8651082675412034e-07, + "loss": 0.4851, + "step": 20463 + }, + { + "epoch": 0.8444334406206156, + "grad_norm": 4.246067287076081, + "learning_rate": 1.86414022785787e-07, + "loss": 0.4669, + "step": 20464 + }, + { + "epoch": 0.8444747049599736, + "grad_norm": 4.785138504929952, + "learning_rate": 1.8631724228138532e-07, + "loss": 0.5481, + "step": 20465 + }, + { + "epoch": 0.8445159692993315, + "grad_norm": 4.957458154940611, + "learning_rate": 1.8622048524264434e-07, + "loss": 0.511, + "step": 20466 + }, + { + "epoch": 0.8445572336386894, + "grad_norm": 2.5377644129657044, + "learning_rate": 1.8612375167129175e-07, + "loss": 0.521, + "step": 20467 + }, + { + "epoch": 0.8445984979780474, + "grad_norm": 3.2295130595627928, + "learning_rate": 1.8602704156905615e-07, + "loss": 0.5103, + "step": 20468 + }, + { + "epoch": 0.8446397623174053, + "grad_norm": 2.76424867596881, + "learning_rate": 1.859303549376643e-07, + "loss": 0.4816, + "step": 20469 + }, + { + "epoch": 0.8446810266567633, + "grad_norm": 1.88439592513062, + "learning_rate": 1.8583369177884353e-07, + "loss": 0.5325, + "step": 20470 + }, + { + "epoch": 0.8447222909961212, + "grad_norm": 7.82494317561967, + "learning_rate": 1.857370520943209e-07, + "loss": 0.5447, + "step": 20471 + }, + { + "epoch": 0.8447635553354791, + "grad_norm": 4.8657302564021565, + "learning_rate": 1.8564043588582208e-07, + "loss": 0.4996, + "step": 20472 + }, + { + "epoch": 0.844804819674837, + "grad_norm": 2.468265955588816, + "learning_rate": 1.85543843155073e-07, + "loss": 0.4517, + "step": 20473 + }, + { + "epoch": 0.8448460840141949, + "grad_norm": 2.2001611453969017, + "learning_rate": 1.8544727390379913e-07, + "loss": 0.5714, + "step": 20474 + }, + { + "epoch": 0.8448873483535528, + "grad_norm": 3.140191168386692, + "learning_rate": 1.8535072813372588e-07, + "loss": 0.5267, + "step": 20475 + }, + { + "epoch": 0.8449286126929108, + "grad_norm": 4.97641303165121, + "learning_rate": 1.8525420584657709e-07, + "loss": 0.5238, + "step": 20476 + }, + { + "epoch": 0.8449698770322687, + "grad_norm": 5.551788461365129, + "learning_rate": 
1.8515770704407702e-07, + "loss": 0.5416, + "step": 20477 + }, + { + "epoch": 0.8450111413716267, + "grad_norm": 3.9280604376800983, + "learning_rate": 1.8506123172794965e-07, + "loss": 0.557, + "step": 20478 + }, + { + "epoch": 0.8450524057109846, + "grad_norm": 9.131165453004837, + "learning_rate": 1.8496477989991838e-07, + "loss": 0.4716, + "step": 20479 + }, + { + "epoch": 0.8450936700503425, + "grad_norm": 2.8293074825684528, + "learning_rate": 1.8486835156170557e-07, + "loss": 0.4927, + "step": 20480 + }, + { + "epoch": 0.8451349343897004, + "grad_norm": 9.187354983481875, + "learning_rate": 1.8477194671503427e-07, + "loss": 0.5757, + "step": 20481 + }, + { + "epoch": 0.8451761987290584, + "grad_norm": 3.4554258762745804, + "learning_rate": 1.8467556536162583e-07, + "loss": 0.499, + "step": 20482 + }, + { + "epoch": 0.8452174630684163, + "grad_norm": 2.7023855942363437, + "learning_rate": 1.8457920750320267e-07, + "loss": 0.4997, + "step": 20483 + }, + { + "epoch": 0.8452587274077742, + "grad_norm": 2.513836748314746, + "learning_rate": 1.844828731414851e-07, + "loss": 0.594, + "step": 20484 + }, + { + "epoch": 0.8452999917471321, + "grad_norm": 3.5895586483779063, + "learning_rate": 1.843865622781944e-07, + "loss": 0.5054, + "step": 20485 + }, + { + "epoch": 0.84534125608649, + "grad_norm": 9.864881135248929, + "learning_rate": 1.8429027491505084e-07, + "loss": 0.478, + "step": 20486 + }, + { + "epoch": 0.8453825204258479, + "grad_norm": 3.6795781608346214, + "learning_rate": 1.8419401105377476e-07, + "loss": 0.4869, + "step": 20487 + }, + { + "epoch": 0.8454237847652059, + "grad_norm": 4.213748983982221, + "learning_rate": 1.840977706960849e-07, + "loss": 0.4881, + "step": 20488 + }, + { + "epoch": 0.8454650491045639, + "grad_norm": 3.5255330176291086, + "learning_rate": 1.8400155384370077e-07, + "loss": 0.5069, + "step": 20489 + }, + { + "epoch": 0.8455063134439218, + "grad_norm": 7.1744811262243875, + "learning_rate": 1.8390536049834094e-07, + "loss": 0.5231, + "step": 20490 + }, + { + "epoch": 0.8455475777832797, + "grad_norm": 2.4564672039144306, + "learning_rate": 1.8380919066172424e-07, + "loss": 0.5211, + "step": 20491 + }, + { + "epoch": 0.8455888421226376, + "grad_norm": 2.3065169976579845, + "learning_rate": 1.8371304433556745e-07, + "loss": 0.5085, + "step": 20492 + }, + { + "epoch": 0.8456301064619955, + "grad_norm": 2.7289707354693253, + "learning_rate": 1.836169215215887e-07, + "loss": 0.4995, + "step": 20493 + }, + { + "epoch": 0.8456713708013535, + "grad_norm": 3.20490995585495, + "learning_rate": 1.8352082222150513e-07, + "loss": 0.4784, + "step": 20494 + }, + { + "epoch": 0.8457126351407114, + "grad_norm": 48.12276481587004, + "learning_rate": 1.834247464370325e-07, + "loss": 0.5943, + "step": 20495 + }, + { + "epoch": 0.8457538994800693, + "grad_norm": 3.0950780857011058, + "learning_rate": 1.8332869416988796e-07, + "loss": 0.5597, + "step": 20496 + }, + { + "epoch": 0.8457951638194272, + "grad_norm": 2.8214748971557597, + "learning_rate": 1.8323266542178645e-07, + "loss": 0.454, + "step": 20497 + }, + { + "epoch": 0.8458364281587851, + "grad_norm": 2.3285757415249098, + "learning_rate": 1.8313666019444347e-07, + "loss": 0.4983, + "step": 20498 + }, + { + "epoch": 0.8458776924981432, + "grad_norm": 3.153099149679945, + "learning_rate": 1.8304067848957446e-07, + "loss": 0.5488, + "step": 20499 + }, + { + "epoch": 0.8459189568375011, + "grad_norm": 4.095912942397865, + "learning_rate": 1.8294472030889303e-07, + "loss": 0.5206, + "step": 20500 + }, + { + "epoch": 
0.845960221176859, + "grad_norm": 3.2857365329160606, + "learning_rate": 1.8284878565411366e-07, + "loss": 0.3984, + "step": 20501 + }, + { + "epoch": 0.8460014855162169, + "grad_norm": 2.443119491669717, + "learning_rate": 1.8275287452695e-07, + "loss": 0.5538, + "step": 20502 + }, + { + "epoch": 0.8460427498555748, + "grad_norm": 2.3844148802879888, + "learning_rate": 1.8265698692911565e-07, + "loss": 0.4656, + "step": 20503 + }, + { + "epoch": 0.8460840141949327, + "grad_norm": 2.8845540636732014, + "learning_rate": 1.8256112286232256e-07, + "loss": 0.514, + "step": 20504 + }, + { + "epoch": 0.8461252785342906, + "grad_norm": 4.844087819670445, + "learning_rate": 1.8246528232828358e-07, + "loss": 0.5317, + "step": 20505 + }, + { + "epoch": 0.8461665428736486, + "grad_norm": 8.774914305861417, + "learning_rate": 1.823694653287108e-07, + "loss": 0.4953, + "step": 20506 + }, + { + "epoch": 0.8462078072130065, + "grad_norm": 3.037101356624935, + "learning_rate": 1.8227367186531568e-07, + "loss": 0.4888, + "step": 20507 + }, + { + "epoch": 0.8462490715523644, + "grad_norm": 4.186597976024756, + "learning_rate": 1.8217790193980922e-07, + "loss": 0.4918, + "step": 20508 + }, + { + "epoch": 0.8462903358917224, + "grad_norm": 3.4849200056106766, + "learning_rate": 1.82082155553902e-07, + "loss": 0.5158, + "step": 20509 + }, + { + "epoch": 0.8463316002310803, + "grad_norm": 3.716697629471027, + "learning_rate": 1.819864327093042e-07, + "loss": 0.4921, + "step": 20510 + }, + { + "epoch": 0.8463728645704383, + "grad_norm": 2.716987843005957, + "learning_rate": 1.8189073340772593e-07, + "loss": 0.5086, + "step": 20511 + }, + { + "epoch": 0.8464141289097962, + "grad_norm": 3.442813350469655, + "learning_rate": 1.81795057650877e-07, + "loss": 0.505, + "step": 20512 + }, + { + "epoch": 0.8464553932491541, + "grad_norm": 2.4306039259639753, + "learning_rate": 1.8169940544046553e-07, + "loss": 0.5321, + "step": 20513 + }, + { + "epoch": 0.846496657588512, + "grad_norm": 3.5289907751572964, + "learning_rate": 1.8160377677820067e-07, + "loss": 0.4949, + "step": 20514 + }, + { + "epoch": 0.8465379219278699, + "grad_norm": 7.177062866178296, + "learning_rate": 1.8150817166579037e-07, + "loss": 0.5086, + "step": 20515 + }, + { + "epoch": 0.8465791862672278, + "grad_norm": 2.671769771953219, + "learning_rate": 1.8141259010494294e-07, + "loss": 0.473, + "step": 20516 + }, + { + "epoch": 0.8466204506065858, + "grad_norm": 2.1685157264181747, + "learning_rate": 1.8131703209736484e-07, + "loss": 0.5004, + "step": 20517 + }, + { + "epoch": 0.8466617149459437, + "grad_norm": 2.4204468897101754, + "learning_rate": 1.8122149764476337e-07, + "loss": 0.5212, + "step": 20518 + }, + { + "epoch": 0.8467029792853017, + "grad_norm": 3.0633167148327485, + "learning_rate": 1.81125986748845e-07, + "loss": 0.4978, + "step": 20519 + }, + { + "epoch": 0.8467442436246596, + "grad_norm": 4.781677343594773, + "learning_rate": 1.8103049941131622e-07, + "loss": 0.4981, + "step": 20520 + }, + { + "epoch": 0.8467855079640175, + "grad_norm": 2.6782976493932016, + "learning_rate": 1.8093503563388213e-07, + "loss": 0.4951, + "step": 20521 + }, + { + "epoch": 0.8468267723033754, + "grad_norm": 9.695622747460906, + "learning_rate": 1.8083959541824807e-07, + "loss": 0.5307, + "step": 20522 + }, + { + "epoch": 0.8468680366427334, + "grad_norm": 3.015137337961357, + "learning_rate": 1.8074417876611865e-07, + "loss": 0.5242, + "step": 20523 + }, + { + "epoch": 0.8469093009820913, + "grad_norm": 3.4150427080550507, + "learning_rate": 
1.8064878567919885e-07, + "loss": 0.4783, + "step": 20524 + }, + { + "epoch": 0.8469505653214492, + "grad_norm": 4.403874577732413, + "learning_rate": 1.8055341615919197e-07, + "loss": 0.5377, + "step": 20525 + }, + { + "epoch": 0.8469918296608071, + "grad_norm": 28.668385252529546, + "learning_rate": 1.8045807020780164e-07, + "loss": 0.4582, + "step": 20526 + }, + { + "epoch": 0.847033094000165, + "grad_norm": 3.7827418349044333, + "learning_rate": 1.8036274782673133e-07, + "loss": 0.5106, + "step": 20527 + }, + { + "epoch": 0.8470743583395229, + "grad_norm": 4.018750690808617, + "learning_rate": 1.8026744901768387e-07, + "loss": 0.4877, + "step": 20528 + }, + { + "epoch": 0.847115622678881, + "grad_norm": 4.246138303629359, + "learning_rate": 1.801721737823609e-07, + "loss": 0.5126, + "step": 20529 + }, + { + "epoch": 0.8471568870182389, + "grad_norm": 2.3189888222290738, + "learning_rate": 1.8007692212246467e-07, + "loss": 0.5135, + "step": 20530 + }, + { + "epoch": 0.8471981513575968, + "grad_norm": 3.3354608553686367, + "learning_rate": 1.7998169403969655e-07, + "loss": 0.4353, + "step": 20531 + }, + { + "epoch": 0.8472394156969547, + "grad_norm": 6.173450200361449, + "learning_rate": 1.7988648953575782e-07, + "loss": 0.4678, + "step": 20532 + }, + { + "epoch": 0.8472806800363126, + "grad_norm": 9.092020953534107, + "learning_rate": 1.7979130861234844e-07, + "loss": 0.5316, + "step": 20533 + }, + { + "epoch": 0.8473219443756705, + "grad_norm": 3.351096274714225, + "learning_rate": 1.7969615127116922e-07, + "loss": 0.5117, + "step": 20534 + }, + { + "epoch": 0.8473632087150285, + "grad_norm": 2.825595543082527, + "learning_rate": 1.796010175139195e-07, + "loss": 0.4717, + "step": 20535 + }, + { + "epoch": 0.8474044730543864, + "grad_norm": 4.626743525501086, + "learning_rate": 1.7950590734229856e-07, + "loss": 0.5007, + "step": 20536 + }, + { + "epoch": 0.8474457373937443, + "grad_norm": 16.031135300017755, + "learning_rate": 1.7941082075800603e-07, + "loss": 0.4829, + "step": 20537 + }, + { + "epoch": 0.8474870017331022, + "grad_norm": 2.5944421320008537, + "learning_rate": 1.7931575776273939e-07, + "loss": 0.5316, + "step": 20538 + }, + { + "epoch": 0.8475282660724602, + "grad_norm": 3.705697101995242, + "learning_rate": 1.7922071835819715e-07, + "loss": 0.491, + "step": 20539 + }, + { + "epoch": 0.8475695304118182, + "grad_norm": 3.982255998611274, + "learning_rate": 1.7912570254607723e-07, + "loss": 0.5446, + "step": 20540 + }, + { + "epoch": 0.8476107947511761, + "grad_norm": 4.471030821228837, + "learning_rate": 1.7903071032807634e-07, + "loss": 0.5394, + "step": 20541 + }, + { + "epoch": 0.847652059090534, + "grad_norm": 2.5081957896182256, + "learning_rate": 1.7893574170589155e-07, + "loss": 0.4962, + "step": 20542 + }, + { + "epoch": 0.8476933234298919, + "grad_norm": 3.528278035891629, + "learning_rate": 1.7884079668121921e-07, + "loss": 0.4921, + "step": 20543 + }, + { + "epoch": 0.8477345877692498, + "grad_norm": 10.763717594616416, + "learning_rate": 1.7874587525575549e-07, + "loss": 0.5075, + "step": 20544 + }, + { + "epoch": 0.8477758521086077, + "grad_norm": 2.4467495182140975, + "learning_rate": 1.7865097743119546e-07, + "loss": 0.5258, + "step": 20545 + }, + { + "epoch": 0.8478171164479656, + "grad_norm": 6.924096852945448, + "learning_rate": 1.7855610320923448e-07, + "loss": 0.4962, + "step": 20546 + }, + { + "epoch": 0.8478583807873236, + "grad_norm": 6.0585696968154545, + "learning_rate": 1.7846125259156753e-07, + "loss": 0.5291, + "step": 20547 + }, + { + "epoch": 
0.8478996451266815, + "grad_norm": 10.224668318454027, + "learning_rate": 1.7836642557988824e-07, + "loss": 0.501, + "step": 20548 + }, + { + "epoch": 0.8479409094660394, + "grad_norm": 68.87341514898053, + "learning_rate": 1.7827162217589126e-07, + "loss": 0.483, + "step": 20549 + }, + { + "epoch": 0.8479821738053974, + "grad_norm": 2.330729240544714, + "learning_rate": 1.7817684238126925e-07, + "loss": 0.5116, + "step": 20550 + }, + { + "epoch": 0.8480234381447553, + "grad_norm": 10.45727702275412, + "learning_rate": 1.7808208619771548e-07, + "loss": 0.4835, + "step": 20551 + }, + { + "epoch": 0.8480647024841133, + "grad_norm": 2.632343988104489, + "learning_rate": 1.779873536269225e-07, + "loss": 0.4652, + "step": 20552 + }, + { + "epoch": 0.8481059668234712, + "grad_norm": 2.9508416612344264, + "learning_rate": 1.7789264467058285e-07, + "loss": 0.5304, + "step": 20553 + }, + { + "epoch": 0.8481472311628291, + "grad_norm": 5.966451676988023, + "learning_rate": 1.777979593303878e-07, + "loss": 0.5543, + "step": 20554 + }, + { + "epoch": 0.848188495502187, + "grad_norm": 4.4053490484714874, + "learning_rate": 1.777032976080289e-07, + "loss": 0.5047, + "step": 20555 + }, + { + "epoch": 0.8482297598415449, + "grad_norm": 2.6770492137501596, + "learning_rate": 1.7760865950519683e-07, + "loss": 0.4876, + "step": 20556 + }, + { + "epoch": 0.8482710241809028, + "grad_norm": 6.065288833046019, + "learning_rate": 1.7751404502358242e-07, + "loss": 0.5242, + "step": 20557 + }, + { + "epoch": 0.8483122885202607, + "grad_norm": 3.8708330221430707, + "learning_rate": 1.7741945416487548e-07, + "loss": 0.6148, + "step": 20558 + }, + { + "epoch": 0.8483535528596187, + "grad_norm": 4.561292832347407, + "learning_rate": 1.7732488693076548e-07, + "loss": 0.4722, + "step": 20559 + }, + { + "epoch": 0.8483948171989767, + "grad_norm": 4.061433032878569, + "learning_rate": 1.7723034332294225e-07, + "loss": 0.5075, + "step": 20560 + }, + { + "epoch": 0.8484360815383346, + "grad_norm": 2.569879935836789, + "learning_rate": 1.7713582334309408e-07, + "loss": 0.5197, + "step": 20561 + }, + { + "epoch": 0.8484773458776925, + "grad_norm": 3.398702308479117, + "learning_rate": 1.7704132699290914e-07, + "loss": 0.6003, + "step": 20562 + }, + { + "epoch": 0.8485186102170504, + "grad_norm": 4.327019349912659, + "learning_rate": 1.769468542740756e-07, + "loss": 0.4906, + "step": 20563 + }, + { + "epoch": 0.8485598745564084, + "grad_norm": 4.836336300997341, + "learning_rate": 1.7685240518828107e-07, + "loss": 0.5319, + "step": 20564 + }, + { + "epoch": 0.8486011388957663, + "grad_norm": 2.9843510106790525, + "learning_rate": 1.7675797973721285e-07, + "loss": 0.465, + "step": 20565 + }, + { + "epoch": 0.8486424032351242, + "grad_norm": 4.52961676014122, + "learning_rate": 1.76663577922557e-07, + "loss": 0.5428, + "step": 20566 + }, + { + "epoch": 0.8486836675744821, + "grad_norm": 2.421842164856409, + "learning_rate": 1.7656919974600027e-07, + "loss": 0.5032, + "step": 20567 + }, + { + "epoch": 0.84872493191384, + "grad_norm": 4.579470771310521, + "learning_rate": 1.764748452092282e-07, + "loss": 0.4835, + "step": 20568 + }, + { + "epoch": 0.8487661962531979, + "grad_norm": 3.7127530456339546, + "learning_rate": 1.7638051431392672e-07, + "loss": 0.4753, + "step": 20569 + }, + { + "epoch": 0.848807460592556, + "grad_norm": 5.18458959793317, + "learning_rate": 1.7628620706178018e-07, + "loss": 0.4984, + "step": 20570 + }, + { + "epoch": 0.8488487249319139, + "grad_norm": 2.674794405233589, + "learning_rate": 
1.761919234544734e-07, + "loss": 0.5682, + "step": 20571 + }, + { + "epoch": 0.8488899892712718, + "grad_norm": 2.2474621336866787, + "learning_rate": 1.760976634936905e-07, + "loss": 0.4931, + "step": 20572 + }, + { + "epoch": 0.8489312536106297, + "grad_norm": 5.347809442307341, + "learning_rate": 1.760034271811155e-07, + "loss": 0.5066, + "step": 20573 + }, + { + "epoch": 0.8489725179499876, + "grad_norm": 2.0657822022084282, + "learning_rate": 1.7590921451843138e-07, + "loss": 0.4929, + "step": 20574 + }, + { + "epoch": 0.8490137822893455, + "grad_norm": 2.86512436243318, + "learning_rate": 1.758150255073208e-07, + "loss": 0.5347, + "step": 20575 + }, + { + "epoch": 0.8490550466287035, + "grad_norm": 2.6113406386720017, + "learning_rate": 1.757208601494666e-07, + "loss": 0.4974, + "step": 20576 + }, + { + "epoch": 0.8490963109680614, + "grad_norm": 3.954754593295653, + "learning_rate": 1.7562671844655087e-07, + "loss": 0.521, + "step": 20577 + }, + { + "epoch": 0.8491375753074193, + "grad_norm": 2.552492503995385, + "learning_rate": 1.7553260040025464e-07, + "loss": 0.4757, + "step": 20578 + }, + { + "epoch": 0.8491788396467772, + "grad_norm": 3.804832180283623, + "learning_rate": 1.7543850601225958e-07, + "loss": 0.4908, + "step": 20579 + }, + { + "epoch": 0.8492201039861352, + "grad_norm": 5.187600101639466, + "learning_rate": 1.753444352842463e-07, + "loss": 0.5024, + "step": 20580 + }, + { + "epoch": 0.8492613683254931, + "grad_norm": 3.2192728742579013, + "learning_rate": 1.752503882178955e-07, + "loss": 0.5456, + "step": 20581 + }, + { + "epoch": 0.8493026326648511, + "grad_norm": 2.2529630835087873, + "learning_rate": 1.751563648148865e-07, + "loss": 0.4797, + "step": 20582 + }, + { + "epoch": 0.849343897004209, + "grad_norm": 11.72648976763424, + "learning_rate": 1.750623650768991e-07, + "loss": 0.484, + "step": 20583 + }, + { + "epoch": 0.8493851613435669, + "grad_norm": 4.293559933000109, + "learning_rate": 1.749683890056123e-07, + "loss": 0.5165, + "step": 20584 + }, + { + "epoch": 0.8494264256829248, + "grad_norm": 3.848993030063703, + "learning_rate": 1.7487443660270508e-07, + "loss": 0.5277, + "step": 20585 + }, + { + "epoch": 0.8494676900222827, + "grad_norm": 3.968292822509146, + "learning_rate": 1.7478050786985545e-07, + "loss": 0.4951, + "step": 20586 + }, + { + "epoch": 0.8495089543616406, + "grad_norm": 7.602763619579128, + "learning_rate": 1.7468660280874088e-07, + "loss": 0.5234, + "step": 20587 + }, + { + "epoch": 0.8495502187009986, + "grad_norm": 4.328027624295907, + "learning_rate": 1.7459272142103888e-07, + "loss": 0.4434, + "step": 20588 + }, + { + "epoch": 0.8495914830403565, + "grad_norm": 2.5781035927093074, + "learning_rate": 1.744988637084266e-07, + "loss": 0.5308, + "step": 20589 + }, + { + "epoch": 0.8496327473797145, + "grad_norm": 9.707702138232909, + "learning_rate": 1.7440502967258072e-07, + "loss": 0.5372, + "step": 20590 + }, + { + "epoch": 0.8496740117190724, + "grad_norm": 3.609231832625761, + "learning_rate": 1.7431121931517702e-07, + "loss": 0.5747, + "step": 20591 + }, + { + "epoch": 0.8497152760584303, + "grad_norm": 5.047182184621689, + "learning_rate": 1.742174326378912e-07, + "loss": 0.524, + "step": 20592 + }, + { + "epoch": 0.8497565403977883, + "grad_norm": 2.7046857755556495, + "learning_rate": 1.7412366964239857e-07, + "loss": 0.5013, + "step": 20593 + }, + { + "epoch": 0.8497978047371462, + "grad_norm": 3.666082519751979, + "learning_rate": 1.7402993033037444e-07, + "loss": 0.5113, + "step": 20594 + }, + { + "epoch": 
0.8498390690765041, + "grad_norm": 8.997616959449118, + "learning_rate": 1.7393621470349253e-07, + "loss": 0.514, + "step": 20595 + }, + { + "epoch": 0.849880333415862, + "grad_norm": 2.984979035297327, + "learning_rate": 1.738425227634271e-07, + "loss": 0.4873, + "step": 20596 + }, + { + "epoch": 0.8499215977552199, + "grad_norm": 3.1092700474328465, + "learning_rate": 1.737488545118517e-07, + "loss": 0.4889, + "step": 20597 + }, + { + "epoch": 0.8499628620945778, + "grad_norm": 4.703574533565807, + "learning_rate": 1.7365520995043975e-07, + "loss": 0.536, + "step": 20598 + }, + { + "epoch": 0.8500041264339357, + "grad_norm": 123.5324893141219, + "learning_rate": 1.73561589080864e-07, + "loss": 0.5114, + "step": 20599 + }, + { + "epoch": 0.8500453907732938, + "grad_norm": 4.158159535496384, + "learning_rate": 1.7346799190479606e-07, + "loss": 0.4968, + "step": 20600 + }, + { + "epoch": 0.8500866551126517, + "grad_norm": 3.889480561196034, + "learning_rate": 1.733744184239083e-07, + "loss": 0.4936, + "step": 20601 + }, + { + "epoch": 0.8501279194520096, + "grad_norm": 7.086619818409026, + "learning_rate": 1.732808686398723e-07, + "loss": 0.5872, + "step": 20602 + }, + { + "epoch": 0.8501691837913675, + "grad_norm": 5.1217902243845534, + "learning_rate": 1.7318734255435882e-07, + "loss": 0.5389, + "step": 20603 + }, + { + "epoch": 0.8502104481307254, + "grad_norm": 2.501464241465843, + "learning_rate": 1.7309384016903845e-07, + "loss": 0.4677, + "step": 20604 + }, + { + "epoch": 0.8502517124700834, + "grad_norm": 2.8933388066614447, + "learning_rate": 1.7300036148558158e-07, + "loss": 0.486, + "step": 20605 + }, + { + "epoch": 0.8502929768094413, + "grad_norm": 2.834222851218525, + "learning_rate": 1.7290690650565816e-07, + "loss": 0.4737, + "step": 20606 + }, + { + "epoch": 0.8503342411487992, + "grad_norm": 8.529815622590416, + "learning_rate": 1.7281347523093705e-07, + "loss": 0.4905, + "step": 20607 + }, + { + "epoch": 0.8503755054881571, + "grad_norm": 2.884496542690078, + "learning_rate": 1.7272006766308723e-07, + "loss": 0.4896, + "step": 20608 + }, + { + "epoch": 0.850416769827515, + "grad_norm": 2.371219136571245, + "learning_rate": 1.7262668380377756e-07, + "loss": 0.5652, + "step": 20609 + }, + { + "epoch": 0.8504580341668729, + "grad_norm": 4.843299094626067, + "learning_rate": 1.72533323654676e-07, + "loss": 0.5406, + "step": 20610 + }, + { + "epoch": 0.850499298506231, + "grad_norm": 7.3804242171749985, + "learning_rate": 1.724399872174499e-07, + "loss": 0.5071, + "step": 20611 + }, + { + "epoch": 0.8505405628455889, + "grad_norm": 5.837593704889522, + "learning_rate": 1.7234667449376662e-07, + "loss": 0.4603, + "step": 20612 + }, + { + "epoch": 0.8505818271849468, + "grad_norm": 1.9793629967845154, + "learning_rate": 1.7225338548529328e-07, + "loss": 0.4961, + "step": 20613 + }, + { + "epoch": 0.8506230915243047, + "grad_norm": 3.582264936711412, + "learning_rate": 1.7216012019369575e-07, + "loss": 0.4886, + "step": 20614 + }, + { + "epoch": 0.8506643558636626, + "grad_norm": 3.5946054125035594, + "learning_rate": 1.720668786206407e-07, + "loss": 0.5025, + "step": 20615 + }, + { + "epoch": 0.8507056202030205, + "grad_norm": 4.09346860878587, + "learning_rate": 1.7197366076779258e-07, + "loss": 0.5065, + "step": 20616 + }, + { + "epoch": 0.8507468845423785, + "grad_norm": 2.424480157365892, + "learning_rate": 1.718804666368173e-07, + "loss": 0.4782, + "step": 20617 + }, + { + "epoch": 0.8507881488817364, + "grad_norm": 2.1847688746819953, + "learning_rate": 
1.7178729622937945e-07, + "loss": 0.5052, + "step": 20618 + }, + { + "epoch": 0.8508294132210943, + "grad_norm": 2.8416862658431867, + "learning_rate": 1.716941495471429e-07, + "loss": 0.5369, + "step": 20619 + }, + { + "epoch": 0.8508706775604522, + "grad_norm": 2.9332138232427774, + "learning_rate": 1.7160102659177186e-07, + "loss": 0.5001, + "step": 20620 + }, + { + "epoch": 0.8509119418998102, + "grad_norm": 1.9316501627005975, + "learning_rate": 1.715079273649296e-07, + "loss": 0.4515, + "step": 20621 + }, + { + "epoch": 0.8509532062391681, + "grad_norm": 4.298093333811215, + "learning_rate": 1.7141485186827933e-07, + "loss": 0.6071, + "step": 20622 + }, + { + "epoch": 0.8509944705785261, + "grad_norm": 3.211519068252615, + "learning_rate": 1.713218001034832e-07, + "loss": 0.5023, + "step": 20623 + }, + { + "epoch": 0.851035734917884, + "grad_norm": 3.3138251801063148, + "learning_rate": 1.7122877207220355e-07, + "loss": 0.5049, + "step": 20624 + }, + { + "epoch": 0.8510769992572419, + "grad_norm": 2.745905739629637, + "learning_rate": 1.7113576777610223e-07, + "loss": 0.5239, + "step": 20625 + }, + { + "epoch": 0.8511182635965998, + "grad_norm": 5.438303928128625, + "learning_rate": 1.7104278721684057e-07, + "loss": 0.4814, + "step": 20626 + }, + { + "epoch": 0.8511595279359577, + "grad_norm": 4.807803179683423, + "learning_rate": 1.7094983039607925e-07, + "loss": 0.521, + "step": 20627 + }, + { + "epoch": 0.8512007922753156, + "grad_norm": 7.722867075744538, + "learning_rate": 1.708568973154786e-07, + "loss": 0.4903, + "step": 20628 + }, + { + "epoch": 0.8512420566146736, + "grad_norm": 3.569587728343647, + "learning_rate": 1.7076398797669862e-07, + "loss": 0.5195, + "step": 20629 + }, + { + "epoch": 0.8512833209540315, + "grad_norm": 5.943299727043395, + "learning_rate": 1.7067110238139898e-07, + "loss": 0.473, + "step": 20630 + }, + { + "epoch": 0.8513245852933895, + "grad_norm": 13.416452030770246, + "learning_rate": 1.7057824053123938e-07, + "loss": 0.5104, + "step": 20631 + }, + { + "epoch": 0.8513658496327474, + "grad_norm": 3.5761350216615364, + "learning_rate": 1.704854024278778e-07, + "loss": 0.524, + "step": 20632 + }, + { + "epoch": 0.8514071139721053, + "grad_norm": 7.954728667962235, + "learning_rate": 1.7039258807297276e-07, + "loss": 0.5283, + "step": 20633 + }, + { + "epoch": 0.8514483783114632, + "grad_norm": 2.4602261552423896, + "learning_rate": 1.7029979746818241e-07, + "loss": 0.4701, + "step": 20634 + }, + { + "epoch": 0.8514896426508212, + "grad_norm": 4.355480279038106, + "learning_rate": 1.7020703061516428e-07, + "loss": 0.4776, + "step": 20635 + }, + { + "epoch": 0.8515309069901791, + "grad_norm": 2.6408006154169894, + "learning_rate": 1.701142875155749e-07, + "loss": 0.5107, + "step": 20636 + }, + { + "epoch": 0.851572171329537, + "grad_norm": 2.79042014457552, + "learning_rate": 1.7002156817107123e-07, + "loss": 0.5073, + "step": 20637 + }, + { + "epoch": 0.8516134356688949, + "grad_norm": 2.3832951746344877, + "learning_rate": 1.699288725833098e-07, + "loss": 0.5533, + "step": 20638 + }, + { + "epoch": 0.8516547000082528, + "grad_norm": 5.279958040005978, + "learning_rate": 1.6983620075394596e-07, + "loss": 0.5165, + "step": 20639 + }, + { + "epoch": 0.8516959643476107, + "grad_norm": 4.057271711851423, + "learning_rate": 1.6974355268463487e-07, + "loss": 0.546, + "step": 20640 + }, + { + "epoch": 0.8517372286869688, + "grad_norm": 17.863656344266964, + "learning_rate": 1.6965092837703172e-07, + "loss": 0.5378, + "step": 20641 + }, + { + "epoch": 
0.8517784930263267, + "grad_norm": 3.4041256929827237, + "learning_rate": 1.6955832783279102e-07, + "loss": 0.5535, + "step": 20642 + }, + { + "epoch": 0.8518197573656846, + "grad_norm": 2.813015272559579, + "learning_rate": 1.694657510535671e-07, + "loss": 0.504, + "step": 20643 + }, + { + "epoch": 0.8518610217050425, + "grad_norm": 2.765555126547218, + "learning_rate": 1.6937319804101314e-07, + "loss": 0.4754, + "step": 20644 + }, + { + "epoch": 0.8519022860444004, + "grad_norm": 2.8592080489937035, + "learning_rate": 1.692806687967825e-07, + "loss": 0.5361, + "step": 20645 + }, + { + "epoch": 0.8519435503837584, + "grad_norm": 2.854100666598673, + "learning_rate": 1.6918816332252818e-07, + "loss": 0.4793, + "step": 20646 + }, + { + "epoch": 0.8519848147231163, + "grad_norm": 3.028253994887551, + "learning_rate": 1.6909568161990268e-07, + "loss": 0.5369, + "step": 20647 + }, + { + "epoch": 0.8520260790624742, + "grad_norm": 3.574428212262692, + "learning_rate": 1.6900322369055738e-07, + "loss": 0.5059, + "step": 20648 + }, + { + "epoch": 0.8520673434018321, + "grad_norm": 6.019702423849611, + "learning_rate": 1.689107895361441e-07, + "loss": 0.5074, + "step": 20649 + }, + { + "epoch": 0.85210860774119, + "grad_norm": 3.0603504009719495, + "learning_rate": 1.6881837915831416e-07, + "loss": 0.534, + "step": 20650 + }, + { + "epoch": 0.852149872080548, + "grad_norm": 3.3358980230089412, + "learning_rate": 1.6872599255871812e-07, + "loss": 0.5923, + "step": 20651 + }, + { + "epoch": 0.852191136419906, + "grad_norm": 3.8879811990167936, + "learning_rate": 1.6863362973900647e-07, + "loss": 0.5556, + "step": 20652 + }, + { + "epoch": 0.8522324007592639, + "grad_norm": 5.417240331209304, + "learning_rate": 1.6854129070082825e-07, + "loss": 0.5331, + "step": 20653 + }, + { + "epoch": 0.8522736650986218, + "grad_norm": 2.938947293206023, + "learning_rate": 1.6844897544583359e-07, + "loss": 0.5616, + "step": 20654 + }, + { + "epoch": 0.8523149294379797, + "grad_norm": 4.445871336510074, + "learning_rate": 1.6835668397567138e-07, + "loss": 0.5491, + "step": 20655 + }, + { + "epoch": 0.8523561937773376, + "grad_norm": 2.7069312535362084, + "learning_rate": 1.682644162919898e-07, + "loss": 0.4948, + "step": 20656 + }, + { + "epoch": 0.8523974581166955, + "grad_norm": 3.3080609895651456, + "learning_rate": 1.6817217239643717e-07, + "loss": 0.5305, + "step": 20657 + }, + { + "epoch": 0.8524387224560535, + "grad_norm": 3.902571702154185, + "learning_rate": 1.6807995229066136e-07, + "loss": 0.5285, + "step": 20658 + }, + { + "epoch": 0.8524799867954114, + "grad_norm": 2.308430791250509, + "learning_rate": 1.679877559763099e-07, + "loss": 0.5411, + "step": 20659 + }, + { + "epoch": 0.8525212511347693, + "grad_norm": 2.13351144555868, + "learning_rate": 1.678955834550289e-07, + "loss": 0.5299, + "step": 20660 + }, + { + "epoch": 0.8525625154741273, + "grad_norm": 6.677125679634408, + "learning_rate": 1.6780343472846532e-07, + "loss": 0.501, + "step": 20661 + }, + { + "epoch": 0.8526037798134852, + "grad_norm": 2.677264398582028, + "learning_rate": 1.6771130979826494e-07, + "loss": 0.4807, + "step": 20662 + }, + { + "epoch": 0.8526450441528431, + "grad_norm": 4.071565402858536, + "learning_rate": 1.6761920866607378e-07, + "loss": 0.544, + "step": 20663 + }, + { + "epoch": 0.8526863084922011, + "grad_norm": 3.9115263198018395, + "learning_rate": 1.675271313335362e-07, + "loss": 0.5217, + "step": 20664 + }, + { + "epoch": 0.852727572831559, + "grad_norm": 6.552421341587817, + "learning_rate": 
1.6743507780229789e-07, + "loss": 0.5657, + "step": 20665 + }, + { + "epoch": 0.8527688371709169, + "grad_norm": 2.0915805410105075, + "learning_rate": 1.6734304807400236e-07, + "loss": 0.4862, + "step": 20666 + }, + { + "epoch": 0.8528101015102748, + "grad_norm": 2.445230775095059, + "learning_rate": 1.6725104215029362e-07, + "loss": 0.5195, + "step": 20667 + }, + { + "epoch": 0.8528513658496327, + "grad_norm": 2.4990684511629326, + "learning_rate": 1.6715906003281572e-07, + "loss": 0.4924, + "step": 20668 + }, + { + "epoch": 0.8528926301889906, + "grad_norm": 2.889175122263383, + "learning_rate": 1.67067101723211e-07, + "loss": 0.5093, + "step": 20669 + }, + { + "epoch": 0.8529338945283486, + "grad_norm": 3.241662430517137, + "learning_rate": 1.669751672231223e-07, + "loss": 0.577, + "step": 20670 + }, + { + "epoch": 0.8529751588677065, + "grad_norm": 4.6915362115115435, + "learning_rate": 1.6688325653419179e-07, + "loss": 0.6143, + "step": 20671 + }, + { + "epoch": 0.8530164232070645, + "grad_norm": 4.535490179738666, + "learning_rate": 1.6679136965806153e-07, + "loss": 0.5196, + "step": 20672 + }, + { + "epoch": 0.8530576875464224, + "grad_norm": 12.033885023328745, + "learning_rate": 1.6669950659637235e-07, + "loss": 0.4927, + "step": 20673 + }, + { + "epoch": 0.8530989518857803, + "grad_norm": 10.944525440009487, + "learning_rate": 1.6660766735076544e-07, + "loss": 0.5125, + "step": 20674 + }, + { + "epoch": 0.8531402162251382, + "grad_norm": 2.9150853849263623, + "learning_rate": 1.6651585192288117e-07, + "loss": 0.4947, + "step": 20675 + }, + { + "epoch": 0.8531814805644962, + "grad_norm": 15.295841978166145, + "learning_rate": 1.6642406031435985e-07, + "loss": 0.5486, + "step": 20676 + }, + { + "epoch": 0.8532227449038541, + "grad_norm": 4.573247846275179, + "learning_rate": 1.6633229252684074e-07, + "loss": 0.4973, + "step": 20677 + }, + { + "epoch": 0.853264009243212, + "grad_norm": 2.851420049561848, + "learning_rate": 1.6624054856196347e-07, + "loss": 0.5456, + "step": 20678 + }, + { + "epoch": 0.8533052735825699, + "grad_norm": 5.134637782417878, + "learning_rate": 1.6614882842136625e-07, + "loss": 0.5015, + "step": 20679 + }, + { + "epoch": 0.8533465379219278, + "grad_norm": 4.364205734443333, + "learning_rate": 1.660571321066881e-07, + "loss": 0.5005, + "step": 20680 + }, + { + "epoch": 0.8533878022612857, + "grad_norm": 2.36106999985332, + "learning_rate": 1.659654596195662e-07, + "loss": 0.5062, + "step": 20681 + }, + { + "epoch": 0.8534290666006438, + "grad_norm": 2.6251117293269908, + "learning_rate": 1.6587381096163846e-07, + "loss": 0.5226, + "step": 20682 + }, + { + "epoch": 0.8534703309400017, + "grad_norm": 6.541759505661083, + "learning_rate": 1.6578218613454198e-07, + "loss": 0.4864, + "step": 20683 + }, + { + "epoch": 0.8535115952793596, + "grad_norm": 3.4340527388927047, + "learning_rate": 1.6569058513991354e-07, + "loss": 0.5264, + "step": 20684 + }, + { + "epoch": 0.8535528596187175, + "grad_norm": 4.8766998820381415, + "learning_rate": 1.6559900797938894e-07, + "loss": 0.4903, + "step": 20685 + }, + { + "epoch": 0.8535941239580754, + "grad_norm": 4.235307443210667, + "learning_rate": 1.655074546546042e-07, + "loss": 0.5212, + "step": 20686 + }, + { + "epoch": 0.8536353882974334, + "grad_norm": 2.3054029208705957, + "learning_rate": 1.6541592516719472e-07, + "loss": 0.5143, + "step": 20687 + }, + { + "epoch": 0.8536766526367913, + "grad_norm": 2.8150631963485524, + "learning_rate": 1.6532441951879584e-07, + "loss": 0.5055, + "step": 20688 + }, + { + "epoch": 
0.8537179169761492, + "grad_norm": 3.812672254455499, + "learning_rate": 1.6523293771104124e-07, + "loss": 0.5838, + "step": 20689 + }, + { + "epoch": 0.8537591813155071, + "grad_norm": 76.44008361429447, + "learning_rate": 1.651414797455655e-07, + "loss": 0.4818, + "step": 20690 + }, + { + "epoch": 0.853800445654865, + "grad_norm": 3.8253066600469827, + "learning_rate": 1.6505004562400223e-07, + "loss": 0.5428, + "step": 20691 + }, + { + "epoch": 0.853841709994223, + "grad_norm": 13.502570220413807, + "learning_rate": 1.64958635347985e-07, + "loss": 0.5359, + "step": 20692 + }, + { + "epoch": 0.853882974333581, + "grad_norm": 5.431073257614708, + "learning_rate": 1.6486724891914633e-07, + "loss": 0.5459, + "step": 20693 + }, + { + "epoch": 0.8539242386729389, + "grad_norm": 1.860314302283798, + "learning_rate": 1.6477588633911827e-07, + "loss": 0.4973, + "step": 20694 + }, + { + "epoch": 0.8539655030122968, + "grad_norm": 7.47344767545625, + "learning_rate": 1.64684547609533e-07, + "loss": 0.4988, + "step": 20695 + }, + { + "epoch": 0.8540067673516547, + "grad_norm": 6.1091996869102605, + "learning_rate": 1.6459323273202254e-07, + "loss": 0.5266, + "step": 20696 + }, + { + "epoch": 0.8540480316910126, + "grad_norm": 3.0417164268062216, + "learning_rate": 1.6450194170821724e-07, + "loss": 0.49, + "step": 20697 + }, + { + "epoch": 0.8540892960303705, + "grad_norm": 3.3883245062853438, + "learning_rate": 1.6441067453974833e-07, + "loss": 0.496, + "step": 20698 + }, + { + "epoch": 0.8541305603697285, + "grad_norm": 2.605472972768974, + "learning_rate": 1.6431943122824582e-07, + "loss": 0.4856, + "step": 20699 + }, + { + "epoch": 0.8541718247090864, + "grad_norm": 2.8103435394641796, + "learning_rate": 1.6422821177533992e-07, + "loss": 0.5084, + "step": 20700 + }, + { + "epoch": 0.8542130890484443, + "grad_norm": 3.6724122112240907, + "learning_rate": 1.6413701618265947e-07, + "loss": 0.4387, + "step": 20701 + }, + { + "epoch": 0.8542543533878023, + "grad_norm": 2.339601278494553, + "learning_rate": 1.640458444518337e-07, + "loss": 0.5396, + "step": 20702 + }, + { + "epoch": 0.8542956177271602, + "grad_norm": 4.834843292274749, + "learning_rate": 1.6395469658449109e-07, + "loss": 0.4995, + "step": 20703 + }, + { + "epoch": 0.8543368820665181, + "grad_norm": 7.199415710448319, + "learning_rate": 1.6386357258226037e-07, + "loss": 0.4892, + "step": 20704 + }, + { + "epoch": 0.8543781464058761, + "grad_norm": 2.6689067394468946, + "learning_rate": 1.6377247244676857e-07, + "loss": 0.4667, + "step": 20705 + }, + { + "epoch": 0.854419410745234, + "grad_norm": 2.810453257905962, + "learning_rate": 1.6368139617964306e-07, + "loss": 0.5128, + "step": 20706 + }, + { + "epoch": 0.8544606750845919, + "grad_norm": 7.544325131630156, + "learning_rate": 1.6359034378251053e-07, + "loss": 0.4895, + "step": 20707 + }, + { + "epoch": 0.8545019394239498, + "grad_norm": 2.59628954625149, + "learning_rate": 1.6349931525699785e-07, + "loss": 0.4853, + "step": 20708 + }, + { + "epoch": 0.8545432037633077, + "grad_norm": 1.6261288552212874, + "learning_rate": 1.6340831060473088e-07, + "loss": 0.4184, + "step": 20709 + }, + { + "epoch": 0.8545844681026656, + "grad_norm": 11.213991897747352, + "learning_rate": 1.63317329827335e-07, + "loss": 0.4564, + "step": 20710 + }, + { + "epoch": 0.8546257324420236, + "grad_norm": 5.408296969985617, + "learning_rate": 1.6322637292643539e-07, + "loss": 0.4963, + "step": 20711 + }, + { + "epoch": 0.8546669967813816, + "grad_norm": 2.982052822440586, + "learning_rate": 
1.6313543990365693e-07, + "loss": 0.51, + "step": 20712 + }, + { + "epoch": 0.8547082611207395, + "grad_norm": 2.9804351368925515, + "learning_rate": 1.6304453076062416e-07, + "loss": 0.5423, + "step": 20713 + }, + { + "epoch": 0.8547495254600974, + "grad_norm": 3.429613973198394, + "learning_rate": 1.6295364549896024e-07, + "loss": 0.5318, + "step": 20714 + }, + { + "epoch": 0.8547907897994553, + "grad_norm": 3.2826268994243915, + "learning_rate": 1.6286278412028909e-07, + "loss": 0.5, + "step": 20715 + }, + { + "epoch": 0.8548320541388132, + "grad_norm": 11.850572341372018, + "learning_rate": 1.6277194662623373e-07, + "loss": 0.5099, + "step": 20716 + }, + { + "epoch": 0.8548733184781712, + "grad_norm": 4.8652756583678585, + "learning_rate": 1.6268113301841654e-07, + "loss": 0.5407, + "step": 20717 + }, + { + "epoch": 0.8549145828175291, + "grad_norm": 2.4186194827640417, + "learning_rate": 1.6259034329846e-07, + "loss": 0.5149, + "step": 20718 + }, + { + "epoch": 0.854955847156887, + "grad_norm": 4.7293308189580285, + "learning_rate": 1.624995774679852e-07, + "loss": 0.5023, + "step": 20719 + }, + { + "epoch": 0.8549971114962449, + "grad_norm": 2.9736090431354953, + "learning_rate": 1.62408835528614e-07, + "loss": 0.4861, + "step": 20720 + }, + { + "epoch": 0.8550383758356028, + "grad_norm": 3.519579551336529, + "learning_rate": 1.6231811748196745e-07, + "loss": 0.5217, + "step": 20721 + }, + { + "epoch": 0.8550796401749609, + "grad_norm": 2.4722384840762643, + "learning_rate": 1.6222742332966524e-07, + "loss": 0.4677, + "step": 20722 + }, + { + "epoch": 0.8551209045143188, + "grad_norm": 4.985183102037558, + "learning_rate": 1.6213675307332803e-07, + "loss": 0.4738, + "step": 20723 + }, + { + "epoch": 0.8551621688536767, + "grad_norm": 4.1280547234497815, + "learning_rate": 1.6204610671457508e-07, + "loss": 0.4925, + "step": 20724 + }, + { + "epoch": 0.8552034331930346, + "grad_norm": 6.365095854098516, + "learning_rate": 1.619554842550261e-07, + "loss": 0.4879, + "step": 20725 + }, + { + "epoch": 0.8552446975323925, + "grad_norm": 2.8005369066484596, + "learning_rate": 1.618648856962991e-07, + "loss": 0.4884, + "step": 20726 + }, + { + "epoch": 0.8552859618717504, + "grad_norm": 2.8120059515812477, + "learning_rate": 1.6177431104001277e-07, + "loss": 0.4539, + "step": 20727 + }, + { + "epoch": 0.8553272262111083, + "grad_norm": 3.7842146104088177, + "learning_rate": 1.6168376028778504e-07, + "loss": 0.5259, + "step": 20728 + }, + { + "epoch": 0.8553684905504663, + "grad_norm": 2.480320133785028, + "learning_rate": 1.6159323344123355e-07, + "loss": 0.5925, + "step": 20729 + }, + { + "epoch": 0.8554097548898242, + "grad_norm": 3.5749865800326197, + "learning_rate": 1.6150273050197471e-07, + "loss": 0.594, + "step": 20730 + }, + { + "epoch": 0.8554510192291821, + "grad_norm": 3.0329325241881357, + "learning_rate": 1.614122514716259e-07, + "loss": 0.5211, + "step": 20731 + }, + { + "epoch": 0.85549228356854, + "grad_norm": 6.567320719736981, + "learning_rate": 1.613217963518025e-07, + "loss": 0.4891, + "step": 20732 + }, + { + "epoch": 0.855533547907898, + "grad_norm": 2.846601718169057, + "learning_rate": 1.6123136514412117e-07, + "loss": 0.5101, + "step": 20733 + }, + { + "epoch": 0.855574812247256, + "grad_norm": 9.019844659075456, + "learning_rate": 1.6114095785019633e-07, + "loss": 0.5783, + "step": 20734 + }, + { + "epoch": 0.8556160765866139, + "grad_norm": 2.152133640759658, + "learning_rate": 1.6105057447164319e-07, + "loss": 0.5052, + "step": 20735 + }, + { + "epoch": 
0.8556573409259718, + "grad_norm": 2.976001598950587, + "learning_rate": 1.6096021501007656e-07, + "loss": 0.5272, + "step": 20736 + }, + { + "epoch": 0.8556986052653297, + "grad_norm": 2.533015376091346, + "learning_rate": 1.608698794671104e-07, + "loss": 0.5435, + "step": 20737 + }, + { + "epoch": 0.8557398696046876, + "grad_norm": 3.709831283745989, + "learning_rate": 1.607795678443579e-07, + "loss": 0.4872, + "step": 20738 + }, + { + "epoch": 0.8557811339440455, + "grad_norm": 3.1781506396999726, + "learning_rate": 1.6068928014343254e-07, + "loss": 0.544, + "step": 20739 + }, + { + "epoch": 0.8558223982834035, + "grad_norm": 5.366410748303011, + "learning_rate": 1.6059901636594708e-07, + "loss": 0.5255, + "step": 20740 + }, + { + "epoch": 0.8558636626227614, + "grad_norm": 4.316255509804378, + "learning_rate": 1.6050877651351426e-07, + "loss": 0.4999, + "step": 20741 + }, + { + "epoch": 0.8559049269621193, + "grad_norm": 5.240417212076479, + "learning_rate": 1.6041856058774507e-07, + "loss": 0.4917, + "step": 20742 + }, + { + "epoch": 0.8559461913014773, + "grad_norm": 11.93878557995211, + "learning_rate": 1.6032836859025175e-07, + "loss": 0.5118, + "step": 20743 + }, + { + "epoch": 0.8559874556408352, + "grad_norm": 2.171591954041889, + "learning_rate": 1.6023820052264516e-07, + "loss": 0.4758, + "step": 20744 + }, + { + "epoch": 0.8560287199801931, + "grad_norm": 2.9892788821430796, + "learning_rate": 1.6014805638653584e-07, + "loss": 0.4802, + "step": 20745 + }, + { + "epoch": 0.8560699843195511, + "grad_norm": 2.513410502162693, + "learning_rate": 1.600579361835342e-07, + "loss": 0.4662, + "step": 20746 + }, + { + "epoch": 0.856111248658909, + "grad_norm": 3.251625422568188, + "learning_rate": 1.5996783991524943e-07, + "loss": 0.4967, + "step": 20747 + }, + { + "epoch": 0.8561525129982669, + "grad_norm": 3.5953295576146997, + "learning_rate": 1.5987776758329142e-07, + "loss": 0.5308, + "step": 20748 + }, + { + "epoch": 0.8561937773376248, + "grad_norm": 2.7480992212178683, + "learning_rate": 1.5978771918926905e-07, + "loss": 0.4955, + "step": 20749 + }, + { + "epoch": 0.8562350416769827, + "grad_norm": 3.422758905429611, + "learning_rate": 1.5969769473479084e-07, + "loss": 0.505, + "step": 20750 + }, + { + "epoch": 0.8562763060163406, + "grad_norm": 3.587861992997816, + "learning_rate": 1.5960769422146453e-07, + "loss": 0.4966, + "step": 20751 + }, + { + "epoch": 0.8563175703556986, + "grad_norm": 5.171513485629233, + "learning_rate": 1.5951771765089785e-07, + "loss": 0.5652, + "step": 20752 + }, + { + "epoch": 0.8563588346950566, + "grad_norm": 42.8323033471365, + "learning_rate": 1.5942776502469814e-07, + "loss": 0.491, + "step": 20753 + }, + { + "epoch": 0.8564000990344145, + "grad_norm": 3.3416874097529807, + "learning_rate": 1.5933783634447245e-07, + "loss": 0.5271, + "step": 20754 + }, + { + "epoch": 0.8564413633737724, + "grad_norm": 3.027330409486976, + "learning_rate": 1.5924793161182655e-07, + "loss": 0.5132, + "step": 20755 + }, + { + "epoch": 0.8564826277131303, + "grad_norm": 4.249697372660024, + "learning_rate": 1.591580508283666e-07, + "loss": 0.4559, + "step": 20756 + }, + { + "epoch": 0.8565238920524882, + "grad_norm": 3.1084734835398824, + "learning_rate": 1.590681939956985e-07, + "loss": 0.5076, + "step": 20757 + }, + { + "epoch": 0.8565651563918462, + "grad_norm": 18.89463031751955, + "learning_rate": 1.5897836111542697e-07, + "loss": 0.5134, + "step": 20758 + }, + { + "epoch": 0.8566064207312041, + "grad_norm": 4.867634585322216, + "learning_rate": 
1.588885521891562e-07, + "loss": 0.5058, + "step": 20759 + }, + { + "epoch": 0.856647685070562, + "grad_norm": 7.893102627717129, + "learning_rate": 1.5879876721849095e-07, + "loss": 0.5156, + "step": 20760 + }, + { + "epoch": 0.8566889494099199, + "grad_norm": 3.5111406302529544, + "learning_rate": 1.5870900620503474e-07, + "loss": 0.452, + "step": 20761 + }, + { + "epoch": 0.8567302137492778, + "grad_norm": 2.938437235925139, + "learning_rate": 1.586192691503915e-07, + "loss": 0.5291, + "step": 20762 + }, + { + "epoch": 0.8567714780886359, + "grad_norm": 2.3766493295123627, + "learning_rate": 1.5852955605616353e-07, + "loss": 0.5017, + "step": 20763 + }, + { + "epoch": 0.8568127424279938, + "grad_norm": 3.1292830275342682, + "learning_rate": 1.584398669239533e-07, + "loss": 0.4907, + "step": 20764 + }, + { + "epoch": 0.8568540067673517, + "grad_norm": 2.171264519550848, + "learning_rate": 1.583502017553633e-07, + "loss": 0.4882, + "step": 20765 + }, + { + "epoch": 0.8568952711067096, + "grad_norm": 2.465470443830705, + "learning_rate": 1.582605605519953e-07, + "loss": 0.513, + "step": 20766 + }, + { + "epoch": 0.8569365354460675, + "grad_norm": 2.561709167852975, + "learning_rate": 1.5817094331544979e-07, + "loss": 0.4613, + "step": 20767 + }, + { + "epoch": 0.8569777997854254, + "grad_norm": 2.5407666191472256, + "learning_rate": 1.5808135004732804e-07, + "loss": 0.5324, + "step": 20768 + }, + { + "epoch": 0.8570190641247833, + "grad_norm": 10.041632711982901, + "learning_rate": 1.5799178074923026e-07, + "loss": 0.4848, + "step": 20769 + }, + { + "epoch": 0.8570603284641413, + "grad_norm": 2.7097660647908124, + "learning_rate": 1.5790223542275683e-07, + "loss": 0.4786, + "step": 20770 + }, + { + "epoch": 0.8571015928034992, + "grad_norm": 11.674980085065032, + "learning_rate": 1.5781271406950682e-07, + "loss": 0.5285, + "step": 20771 + }, + { + "epoch": 0.8571428571428571, + "grad_norm": 7.931906271427605, + "learning_rate": 1.5772321669107926e-07, + "loss": 0.4694, + "step": 20772 + }, + { + "epoch": 0.8571841214822151, + "grad_norm": 8.325768826598162, + "learning_rate": 1.5763374328907272e-07, + "loss": 0.526, + "step": 20773 + }, + { + "epoch": 0.857225385821573, + "grad_norm": 4.882141920253423, + "learning_rate": 1.5754429386508605e-07, + "loss": 0.491, + "step": 20774 + }, + { + "epoch": 0.857266650160931, + "grad_norm": 2.0032785246025253, + "learning_rate": 1.574548684207162e-07, + "loss": 0.5165, + "step": 20775 + }, + { + "epoch": 0.8573079145002889, + "grad_norm": 4.090966990595209, + "learning_rate": 1.5736546695756104e-07, + "loss": 0.4955, + "step": 20776 + }, + { + "epoch": 0.8573491788396468, + "grad_norm": 12.545488198154732, + "learning_rate": 1.5727608947721727e-07, + "loss": 0.5315, + "step": 20777 + }, + { + "epoch": 0.8573904431790047, + "grad_norm": 2.9554924535906566, + "learning_rate": 1.5718673598128196e-07, + "loss": 0.5292, + "step": 20778 + }, + { + "epoch": 0.8574317075183626, + "grad_norm": 6.9447183899782, + "learning_rate": 1.5709740647135034e-07, + "loss": 0.5193, + "step": 20779 + }, + { + "epoch": 0.8574729718577205, + "grad_norm": 2.1674768261945316, + "learning_rate": 1.5700810094901846e-07, + "loss": 0.4618, + "step": 20780 + }, + { + "epoch": 0.8575142361970784, + "grad_norm": 2.9657124566828266, + "learning_rate": 1.5691881941588154e-07, + "loss": 0.4558, + "step": 20781 + }, + { + "epoch": 0.8575555005364364, + "grad_norm": 2.722973701283187, + "learning_rate": 1.568295618735348e-07, + "loss": 0.4997, + "step": 20782 + }, + { + "epoch": 
0.8575967648757944, + "grad_norm": 8.403835163958036, + "learning_rate": 1.567403283235718e-07, + "loss": 0.5129, + "step": 20783 + }, + { + "epoch": 0.8576380292151523, + "grad_norm": 4.357967577493797, + "learning_rate": 1.5665111876758716e-07, + "loss": 0.5603, + "step": 20784 + }, + { + "epoch": 0.8576792935545102, + "grad_norm": 3.3990321492591336, + "learning_rate": 1.5656193320717382e-07, + "loss": 0.4838, + "step": 20785 + }, + { + "epoch": 0.8577205578938681, + "grad_norm": 2.509642512685611, + "learning_rate": 1.564727716439251e-07, + "loss": 0.4939, + "step": 20786 + }, + { + "epoch": 0.8577618222332261, + "grad_norm": 2.062222925655413, + "learning_rate": 1.5638363407943385e-07, + "loss": 0.4716, + "step": 20787 + }, + { + "epoch": 0.857803086572584, + "grad_norm": 2.6278629770107136, + "learning_rate": 1.5629452051529198e-07, + "loss": 0.5162, + "step": 20788 + }, + { + "epoch": 0.8578443509119419, + "grad_norm": 177.45351041340115, + "learning_rate": 1.5620543095309136e-07, + "loss": 0.5168, + "step": 20789 + }, + { + "epoch": 0.8578856152512998, + "grad_norm": 9.68716765325765, + "learning_rate": 1.5611636539442343e-07, + "loss": 0.4816, + "step": 20790 + }, + { + "epoch": 0.8579268795906577, + "grad_norm": 3.3913408908086136, + "learning_rate": 1.5602732384087936e-07, + "loss": 0.4924, + "step": 20791 + }, + { + "epoch": 0.8579681439300156, + "grad_norm": 4.659664170972968, + "learning_rate": 1.559383062940491e-07, + "loss": 0.5663, + "step": 20792 + }, + { + "epoch": 0.8580094082693736, + "grad_norm": 2.524741533095841, + "learning_rate": 1.55849312755523e-07, + "loss": 0.4133, + "step": 20793 + }, + { + "epoch": 0.8580506726087316, + "grad_norm": 2.934374422965868, + "learning_rate": 1.5576034322689097e-07, + "loss": 0.5435, + "step": 20794 + }, + { + "epoch": 0.8580919369480895, + "grad_norm": 3.4310667543363125, + "learning_rate": 1.5567139770974175e-07, + "loss": 0.5094, + "step": 20795 + }, + { + "epoch": 0.8581332012874474, + "grad_norm": 3.875926855427969, + "learning_rate": 1.555824762056644e-07, + "loss": 0.4832, + "step": 20796 + }, + { + "epoch": 0.8581744656268053, + "grad_norm": 2.2423776047563537, + "learning_rate": 1.5549357871624747e-07, + "loss": 0.4656, + "step": 20797 + }, + { + "epoch": 0.8582157299661632, + "grad_norm": 3.094441052482918, + "learning_rate": 1.5540470524307836e-07, + "loss": 0.5572, + "step": 20798 + }, + { + "epoch": 0.8582569943055212, + "grad_norm": 2.2778404579815446, + "learning_rate": 1.5531585578774516e-07, + "loss": 0.5365, + "step": 20799 + }, + { + "epoch": 0.8582982586448791, + "grad_norm": 2.1846179213047727, + "learning_rate": 1.552270303518344e-07, + "loss": 0.4855, + "step": 20800 + }, + { + "epoch": 0.858339522984237, + "grad_norm": 7.262037608455406, + "learning_rate": 1.5513822893693314e-07, + "loss": 0.4919, + "step": 20801 + }, + { + "epoch": 0.8583807873235949, + "grad_norm": 2.1990509802363607, + "learning_rate": 1.5504945154462712e-07, + "loss": 0.4714, + "step": 20802 + }, + { + "epoch": 0.8584220516629528, + "grad_norm": 3.0545948611625953, + "learning_rate": 1.5496069817650292e-07, + "loss": 0.5043, + "step": 20803 + }, + { + "epoch": 0.8584633160023109, + "grad_norm": 5.517360871217575, + "learning_rate": 1.548719688341453e-07, + "loss": 0.5523, + "step": 20804 + }, + { + "epoch": 0.8585045803416688, + "grad_norm": 2.5444535433733835, + "learning_rate": 1.5478326351913907e-07, + "loss": 0.5125, + "step": 20805 + }, + { + "epoch": 0.8585458446810267, + "grad_norm": 2.4783967705049554, + "learning_rate": 
1.5469458223306905e-07, + "loss": 0.5586, + "step": 20806 + }, + { + "epoch": 0.8585871090203846, + "grad_norm": 5.61041697487362, + "learning_rate": 1.546059249775194e-07, + "loss": 0.5369, + "step": 20807 + }, + { + "epoch": 0.8586283733597425, + "grad_norm": 3.7964861138806945, + "learning_rate": 1.5451729175407341e-07, + "loss": 0.5347, + "step": 20808 + }, + { + "epoch": 0.8586696376991004, + "grad_norm": 2.3792646757980287, + "learning_rate": 1.5442868256431447e-07, + "loss": 0.4952, + "step": 20809 + }, + { + "epoch": 0.8587109020384583, + "grad_norm": 3.752288523751364, + "learning_rate": 1.5434009740982564e-07, + "loss": 0.4925, + "step": 20810 + }, + { + "epoch": 0.8587521663778163, + "grad_norm": 2.690060233101509, + "learning_rate": 1.5425153629218913e-07, + "loss": 0.5082, + "step": 20811 + }, + { + "epoch": 0.8587934307171742, + "grad_norm": 4.577370164267196, + "learning_rate": 1.5416299921298622e-07, + "loss": 0.5169, + "step": 20812 + }, + { + "epoch": 0.8588346950565321, + "grad_norm": 2.273622284143545, + "learning_rate": 1.5407448617379911e-07, + "loss": 0.5232, + "step": 20813 + }, + { + "epoch": 0.8588759593958901, + "grad_norm": 5.456758923925101, + "learning_rate": 1.5398599717620853e-07, + "loss": 0.4999, + "step": 20814 + }, + { + "epoch": 0.858917223735248, + "grad_norm": 2.7187281022742784, + "learning_rate": 1.538975322217956e-07, + "loss": 0.5353, + "step": 20815 + }, + { + "epoch": 0.858958488074606, + "grad_norm": 4.967648047940851, + "learning_rate": 1.5380909131213983e-07, + "loss": 0.4775, + "step": 20816 + }, + { + "epoch": 0.8589997524139639, + "grad_norm": 7.232258671277637, + "learning_rate": 1.5372067444882133e-07, + "loss": 0.4907, + "step": 20817 + }, + { + "epoch": 0.8590410167533218, + "grad_norm": 8.8577537068348, + "learning_rate": 1.5363228163341947e-07, + "loss": 0.5064, + "step": 20818 + }, + { + "epoch": 0.8590822810926797, + "grad_norm": 2.4752822595556614, + "learning_rate": 1.535439128675135e-07, + "loss": 0.5505, + "step": 20819 + }, + { + "epoch": 0.8591235454320376, + "grad_norm": 6.5617118809066355, + "learning_rate": 1.5345556815268136e-07, + "loss": 0.5334, + "step": 20820 + }, + { + "epoch": 0.8591648097713955, + "grad_norm": 2.9927027185629407, + "learning_rate": 1.5336724749050108e-07, + "loss": 0.5035, + "step": 20821 + }, + { + "epoch": 0.8592060741107534, + "grad_norm": 4.089171475021913, + "learning_rate": 1.5327895088255072e-07, + "loss": 0.4682, + "step": 20822 + }, + { + "epoch": 0.8592473384501114, + "grad_norm": 18.012804932689978, + "learning_rate": 1.5319067833040758e-07, + "loss": 0.4865, + "step": 20823 + }, + { + "epoch": 0.8592886027894694, + "grad_norm": 2.718200773101018, + "learning_rate": 1.53102429835648e-07, + "loss": 0.5103, + "step": 20824 + }, + { + "epoch": 0.8593298671288273, + "grad_norm": 3.498256957454186, + "learning_rate": 1.5301420539984807e-07, + "loss": 0.5253, + "step": 20825 + }, + { + "epoch": 0.8593711314681852, + "grad_norm": 3.028968301054282, + "learning_rate": 1.529260050245842e-07, + "loss": 0.5334, + "step": 20826 + }, + { + "epoch": 0.8594123958075431, + "grad_norm": 2.375534881257077, + "learning_rate": 1.528378287114318e-07, + "loss": 0.5317, + "step": 20827 + }, + { + "epoch": 0.859453660146901, + "grad_norm": 5.87508283176509, + "learning_rate": 1.5274967646196614e-07, + "loss": 0.5665, + "step": 20828 + }, + { + "epoch": 0.859494924486259, + "grad_norm": 13.88502209012203, + "learning_rate": 1.5266154827776124e-07, + "loss": 0.5195, + "step": 20829 + }, + { + "epoch": 
0.8595361888256169, + "grad_norm": 3.1132831059152597, + "learning_rate": 1.5257344416039153e-07, + "loss": 0.5171, + "step": 20830 + }, + { + "epoch": 0.8595774531649748, + "grad_norm": 4.469944029344584, + "learning_rate": 1.5248536411143095e-07, + "loss": 0.4873, + "step": 20831 + }, + { + "epoch": 0.8596187175043327, + "grad_norm": 2.6855262989267734, + "learning_rate": 1.52397308132453e-07, + "loss": 0.5056, + "step": 20832 + }, + { + "epoch": 0.8596599818436906, + "grad_norm": 2.970297529284747, + "learning_rate": 1.5230927622502999e-07, + "loss": 0.5075, + "step": 20833 + }, + { + "epoch": 0.8597012461830487, + "grad_norm": 1.8272205875415513, + "learning_rate": 1.522212683907348e-07, + "loss": 0.4851, + "step": 20834 + }, + { + "epoch": 0.8597425105224066, + "grad_norm": 2.5423175721238085, + "learning_rate": 1.5213328463113952e-07, + "loss": 0.5912, + "step": 20835 + }, + { + "epoch": 0.8597837748617645, + "grad_norm": 7.569064512036721, + "learning_rate": 1.520453249478157e-07, + "loss": 0.5429, + "step": 20836 + }, + { + "epoch": 0.8598250392011224, + "grad_norm": 2.4054444949900824, + "learning_rate": 1.5195738934233427e-07, + "loss": 0.5076, + "step": 20837 + }, + { + "epoch": 0.8598663035404803, + "grad_norm": 2.293058929753664, + "learning_rate": 1.51869477816266e-07, + "loss": 0.5126, + "step": 20838 + }, + { + "epoch": 0.8599075678798382, + "grad_norm": 2.847928064191466, + "learning_rate": 1.517815903711814e-07, + "loss": 0.5707, + "step": 20839 + }, + { + "epoch": 0.8599488322191962, + "grad_norm": 2.692320532904878, + "learning_rate": 1.5169372700865059e-07, + "loss": 0.5046, + "step": 20840 + }, + { + "epoch": 0.8599900965585541, + "grad_norm": 2.830100890094445, + "learning_rate": 1.5160588773024248e-07, + "loss": 0.5318, + "step": 20841 + }, + { + "epoch": 0.860031360897912, + "grad_norm": 8.140162905116128, + "learning_rate": 1.5151807253752646e-07, + "loss": 0.5124, + "step": 20842 + }, + { + "epoch": 0.8600726252372699, + "grad_norm": 4.0010077322696, + "learning_rate": 1.51430281432071e-07, + "loss": 0.4865, + "step": 20843 + }, + { + "epoch": 0.8601138895766279, + "grad_norm": 10.552638324442265, + "learning_rate": 1.513425144154446e-07, + "loss": 0.5029, + "step": 20844 + }, + { + "epoch": 0.8601551539159858, + "grad_norm": 2.854617837760298, + "learning_rate": 1.5125477148921442e-07, + "loss": 0.5229, + "step": 20845 + }, + { + "epoch": 0.8601964182553438, + "grad_norm": 17.105819621891182, + "learning_rate": 1.5116705265494813e-07, + "loss": 0.5365, + "step": 20846 + }, + { + "epoch": 0.8602376825947017, + "grad_norm": 5.715462500126542, + "learning_rate": 1.5107935791421235e-07, + "loss": 0.4815, + "step": 20847 + }, + { + "epoch": 0.8602789469340596, + "grad_norm": 3.1122686123703502, + "learning_rate": 1.5099168726857415e-07, + "loss": 0.5618, + "step": 20848 + }, + { + "epoch": 0.8603202112734175, + "grad_norm": 2.0270155122456868, + "learning_rate": 1.5090404071959912e-07, + "loss": 0.4535, + "step": 20849 + }, + { + "epoch": 0.8603614756127754, + "grad_norm": 2.7752256029139533, + "learning_rate": 1.5081641826885268e-07, + "loss": 0.548, + "step": 20850 + }, + { + "epoch": 0.8604027399521333, + "grad_norm": 6.472758815209234, + "learning_rate": 1.5072881991789988e-07, + "loss": 0.5205, + "step": 20851 + }, + { + "epoch": 0.8604440042914913, + "grad_norm": 3.8783823010396135, + "learning_rate": 1.5064124566830618e-07, + "loss": 0.5208, + "step": 20852 + }, + { + "epoch": 0.8604852686308492, + "grad_norm": 6.462810929336815, + "learning_rate": 
1.5055369552163512e-07, + "loss": 0.5084, + "step": 20853 + }, + { + "epoch": 0.8605265329702071, + "grad_norm": 7.661704775664861, + "learning_rate": 1.504661694794508e-07, + "loss": 0.5396, + "step": 20854 + }, + { + "epoch": 0.8605677973095651, + "grad_norm": 2.827936971177037, + "learning_rate": 1.503786675433168e-07, + "loss": 0.4855, + "step": 20855 + }, + { + "epoch": 0.860609061648923, + "grad_norm": 6.259612996563532, + "learning_rate": 1.502911897147962e-07, + "loss": 0.5022, + "step": 20856 + }, + { + "epoch": 0.860650325988281, + "grad_norm": 2.3925088188327557, + "learning_rate": 1.5020373599545112e-07, + "loss": 0.4953, + "step": 20857 + }, + { + "epoch": 0.8606915903276389, + "grad_norm": 2.879421925460139, + "learning_rate": 1.501163063868441e-07, + "loss": 0.509, + "step": 20858 + }, + { + "epoch": 0.8607328546669968, + "grad_norm": 8.159873007393536, + "learning_rate": 1.5002890089053673e-07, + "loss": 0.4994, + "step": 20859 + }, + { + "epoch": 0.8607741190063547, + "grad_norm": 7.149986354147796, + "learning_rate": 1.4994151950809042e-07, + "loss": 0.5058, + "step": 20860 + }, + { + "epoch": 0.8608153833457126, + "grad_norm": 3.3866472904331624, + "learning_rate": 1.498541622410658e-07, + "loss": 0.4921, + "step": 20861 + }, + { + "epoch": 0.8608566476850705, + "grad_norm": 5.500386537425484, + "learning_rate": 1.4976682909102357e-07, + "loss": 0.499, + "step": 20862 + }, + { + "epoch": 0.8608979120244284, + "grad_norm": 4.493111097185459, + "learning_rate": 1.4967952005952334e-07, + "loss": 0.5867, + "step": 20863 + }, + { + "epoch": 0.8609391763637864, + "grad_norm": 3.552208048761451, + "learning_rate": 1.4959223514812471e-07, + "loss": 0.498, + "step": 20864 + }, + { + "epoch": 0.8609804407031444, + "grad_norm": 8.093793859783514, + "learning_rate": 1.495049743583874e-07, + "loss": 0.5552, + "step": 20865 + }, + { + "epoch": 0.8610217050425023, + "grad_norm": 2.4668422283377356, + "learning_rate": 1.4941773769186934e-07, + "loss": 0.4668, + "step": 20866 + }, + { + "epoch": 0.8610629693818602, + "grad_norm": 2.8640422590442145, + "learning_rate": 1.4933052515012913e-07, + "loss": 0.504, + "step": 20867 + }, + { + "epoch": 0.8611042337212181, + "grad_norm": 4.976875996288219, + "learning_rate": 1.4924333673472451e-07, + "loss": 0.535, + "step": 20868 + }, + { + "epoch": 0.861145498060576, + "grad_norm": 2.2491664013051786, + "learning_rate": 1.4915617244721342e-07, + "loss": 0.5201, + "step": 20869 + }, + { + "epoch": 0.861186762399934, + "grad_norm": 2.6985153655207816, + "learning_rate": 1.490690322891519e-07, + "loss": 0.498, + "step": 20870 + }, + { + "epoch": 0.8612280267392919, + "grad_norm": 3.7632293350234765, + "learning_rate": 1.4898191626209713e-07, + "loss": 0.5185, + "step": 20871 + }, + { + "epoch": 0.8612692910786498, + "grad_norm": 2.4848777518773026, + "learning_rate": 1.4889482436760542e-07, + "loss": 0.4912, + "step": 20872 + }, + { + "epoch": 0.8613105554180077, + "grad_norm": 2.952402456238502, + "learning_rate": 1.4880775660723163e-07, + "loss": 0.5201, + "step": 20873 + }, + { + "epoch": 0.8613518197573656, + "grad_norm": 3.191228290278444, + "learning_rate": 1.487207129825316e-07, + "loss": 0.5109, + "step": 20874 + }, + { + "epoch": 0.8613930840967237, + "grad_norm": 3.9903884267002545, + "learning_rate": 1.4863369349505995e-07, + "loss": 0.5184, + "step": 20875 + }, + { + "epoch": 0.8614343484360816, + "grad_norm": 3.2345069264602757, + "learning_rate": 1.4854669814637145e-07, + "loss": 0.5156, + "step": 20876 + }, + { + "epoch": 
0.8614756127754395, + "grad_norm": 3.0888284379483406, + "learning_rate": 1.4845972693801967e-07, + "loss": 0.4542, + "step": 20877 + }, + { + "epoch": 0.8615168771147974, + "grad_norm": 4.5099099140873395, + "learning_rate": 1.483727798715579e-07, + "loss": 0.5499, + "step": 20878 + }, + { + "epoch": 0.8615581414541553, + "grad_norm": 3.992524907939731, + "learning_rate": 1.482858569485397e-07, + "loss": 0.5199, + "step": 20879 + }, + { + "epoch": 0.8615994057935132, + "grad_norm": 3.028411645667146, + "learning_rate": 1.481989581705175e-07, + "loss": 0.5327, + "step": 20880 + }, + { + "epoch": 0.8616406701328712, + "grad_norm": 5.357463147910782, + "learning_rate": 1.481120835390437e-07, + "loss": 0.4689, + "step": 20881 + }, + { + "epoch": 0.8616819344722291, + "grad_norm": 2.4237527444191476, + "learning_rate": 1.4802523305566995e-07, + "loss": 0.5039, + "step": 20882 + }, + { + "epoch": 0.861723198811587, + "grad_norm": 3.068151655434148, + "learning_rate": 1.4793840672194746e-07, + "loss": 0.5703, + "step": 20883 + }, + { + "epoch": 0.8617644631509449, + "grad_norm": 2.7774804833517557, + "learning_rate": 1.4785160453942754e-07, + "loss": 0.5041, + "step": 20884 + }, + { + "epoch": 0.8618057274903029, + "grad_norm": 2.1403393363127137, + "learning_rate": 1.4776482650966073e-07, + "loss": 0.5431, + "step": 20885 + }, + { + "epoch": 0.8618469918296608, + "grad_norm": 3.279825376564114, + "learning_rate": 1.4767807263419663e-07, + "loss": 0.5352, + "step": 20886 + }, + { + "epoch": 0.8618882561690188, + "grad_norm": 2.4181666799411623, + "learning_rate": 1.475913429145852e-07, + "loss": 0.52, + "step": 20887 + }, + { + "epoch": 0.8619295205083767, + "grad_norm": 2.259949304794223, + "learning_rate": 1.475046373523755e-07, + "loss": 0.4848, + "step": 20888 + }, + { + "epoch": 0.8619707848477346, + "grad_norm": 3.531765224355131, + "learning_rate": 1.47417955949117e-07, + "loss": 0.5177, + "step": 20889 + }, + { + "epoch": 0.8620120491870925, + "grad_norm": 2.33960535158428, + "learning_rate": 1.4733129870635674e-07, + "loss": 0.55, + "step": 20890 + }, + { + "epoch": 0.8620533135264504, + "grad_norm": 6.516890946143506, + "learning_rate": 1.4724466562564354e-07, + "loss": 0.5248, + "step": 20891 + }, + { + "epoch": 0.8620945778658083, + "grad_norm": 2.1776841776599984, + "learning_rate": 1.471580567085246e-07, + "loss": 0.4732, + "step": 20892 + }, + { + "epoch": 0.8621358422051663, + "grad_norm": 2.6059311244944454, + "learning_rate": 1.4707147195654742e-07, + "loss": 0.5308, + "step": 20893 + }, + { + "epoch": 0.8621771065445242, + "grad_norm": 2.3895578471208, + "learning_rate": 1.4698491137125786e-07, + "loss": 0.4317, + "step": 20894 + }, + { + "epoch": 0.8622183708838822, + "grad_norm": 2.6356439739637723, + "learning_rate": 1.4689837495420255e-07, + "loss": 0.529, + "step": 20895 + }, + { + "epoch": 0.8622596352232401, + "grad_norm": 6.824858831785511, + "learning_rate": 1.4681186270692725e-07, + "loss": 0.5461, + "step": 20896 + }, + { + "epoch": 0.862300899562598, + "grad_norm": 2.808053809698087, + "learning_rate": 1.4672537463097757e-07, + "loss": 0.5106, + "step": 20897 + }, + { + "epoch": 0.862342163901956, + "grad_norm": 5.705010549440051, + "learning_rate": 1.4663891072789775e-07, + "loss": 0.5846, + "step": 20898 + }, + { + "epoch": 0.8623834282413139, + "grad_norm": 2.6196493059413943, + "learning_rate": 1.4655247099923242e-07, + "loss": 0.5252, + "step": 20899 + }, + { + "epoch": 0.8624246925806718, + "grad_norm": 3.661005424217589, + "learning_rate": 
1.4646605544652596e-07, + "loss": 0.5775, + "step": 20900 + }, + { + "epoch": 0.8624659569200297, + "grad_norm": 2.9197454837163512, + "learning_rate": 1.4637966407132202e-07, + "loss": 0.5046, + "step": 20901 + }, + { + "epoch": 0.8625072212593876, + "grad_norm": 7.309807702295779, + "learning_rate": 1.462932968751635e-07, + "loss": 0.4784, + "step": 20902 + }, + { + "epoch": 0.8625484855987455, + "grad_norm": 3.0805647098027924, + "learning_rate": 1.4620695385959288e-07, + "loss": 0.4692, + "step": 20903 + }, + { + "epoch": 0.8625897499381034, + "grad_norm": 2.6649995694453477, + "learning_rate": 1.4612063502615269e-07, + "loss": 0.5643, + "step": 20904 + }, + { + "epoch": 0.8626310142774615, + "grad_norm": 3.1700464261035393, + "learning_rate": 1.460343403763849e-07, + "loss": 0.5288, + "step": 20905 + }, + { + "epoch": 0.8626722786168194, + "grad_norm": 2.8020536369918227, + "learning_rate": 1.4594806991183112e-07, + "loss": 0.5091, + "step": 20906 + }, + { + "epoch": 0.8627135429561773, + "grad_norm": 2.9155536264958615, + "learning_rate": 1.4586182363403177e-07, + "loss": 0.4999, + "step": 20907 + }, + { + "epoch": 0.8627548072955352, + "grad_norm": 3.187584670702547, + "learning_rate": 1.4577560154452797e-07, + "loss": 0.4684, + "step": 20908 + }, + { + "epoch": 0.8627960716348931, + "grad_norm": 3.2304768816975806, + "learning_rate": 1.456894036448596e-07, + "loss": 0.5349, + "step": 20909 + }, + { + "epoch": 0.862837335974251, + "grad_norm": 3.036758907588507, + "learning_rate": 1.4560322993656665e-07, + "loss": 0.4825, + "step": 20910 + }, + { + "epoch": 0.862878600313609, + "grad_norm": 3.400585616218448, + "learning_rate": 1.4551708042118805e-07, + "loss": 0.4582, + "step": 20911 + }, + { + "epoch": 0.8629198646529669, + "grad_norm": 2.845797594965276, + "learning_rate": 1.4543095510026273e-07, + "loss": 0.5447, + "step": 20912 + }, + { + "epoch": 0.8629611289923248, + "grad_norm": 6.894068381144356, + "learning_rate": 1.4534485397532943e-07, + "loss": 0.5666, + "step": 20913 + }, + { + "epoch": 0.8630023933316827, + "grad_norm": 8.320560432406447, + "learning_rate": 1.4525877704792563e-07, + "loss": 0.4526, + "step": 20914 + }, + { + "epoch": 0.8630436576710406, + "grad_norm": 2.9296970224730874, + "learning_rate": 1.451727243195894e-07, + "loss": 0.6339, + "step": 20915 + }, + { + "epoch": 0.8630849220103987, + "grad_norm": 2.1400029984855693, + "learning_rate": 1.4508669579185718e-07, + "loss": 0.5484, + "step": 20916 + }, + { + "epoch": 0.8631261863497566, + "grad_norm": 13.878140054488794, + "learning_rate": 1.450006914662661e-07, + "loss": 0.5459, + "step": 20917 + }, + { + "epoch": 0.8631674506891145, + "grad_norm": 2.5754406691752356, + "learning_rate": 1.4491471134435276e-07, + "loss": 0.5386, + "step": 20918 + }, + { + "epoch": 0.8632087150284724, + "grad_norm": 2.78159945785794, + "learning_rate": 1.4482875542765205e-07, + "loss": 0.518, + "step": 20919 + }, + { + "epoch": 0.8632499793678303, + "grad_norm": 3.5778267151092162, + "learning_rate": 1.4474282371770016e-07, + "loss": 0.5246, + "step": 20920 + }, + { + "epoch": 0.8632912437071882, + "grad_norm": 13.585705350001783, + "learning_rate": 1.446569162160316e-07, + "loss": 0.5583, + "step": 20921 + }, + { + "epoch": 0.8633325080465462, + "grad_norm": 3.6005762155537595, + "learning_rate": 1.4457103292418138e-07, + "loss": 0.476, + "step": 20922 + }, + { + "epoch": 0.8633737723859041, + "grad_norm": 2.531515987807055, + "learning_rate": 1.444851738436831e-07, + "loss": 0.5448, + "step": 20923 + }, + { + "epoch": 
0.863415036725262, + "grad_norm": 3.4531369700603634, + "learning_rate": 1.443993389760705e-07, + "loss": 0.5067, + "step": 20924 + }, + { + "epoch": 0.8634563010646199, + "grad_norm": 2.7509736945363126, + "learning_rate": 1.4431352832287688e-07, + "loss": 0.5325, + "step": 20925 + }, + { + "epoch": 0.8634975654039779, + "grad_norm": 4.998984888455387, + "learning_rate": 1.442277418856353e-07, + "loss": 0.4916, + "step": 20926 + }, + { + "epoch": 0.8635388297433358, + "grad_norm": 2.8633398896905398, + "learning_rate": 1.4414197966587777e-07, + "loss": 0.5085, + "step": 20927 + }, + { + "epoch": 0.8635800940826938, + "grad_norm": 3.651693796634531, + "learning_rate": 1.4405624166513636e-07, + "loss": 0.4556, + "step": 20928 + }, + { + "epoch": 0.8636213584220517, + "grad_norm": 7.81005628611634, + "learning_rate": 1.4397052788494235e-07, + "loss": 0.5657, + "step": 20929 + }, + { + "epoch": 0.8636626227614096, + "grad_norm": 7.446325857314336, + "learning_rate": 1.4388483832682736e-07, + "loss": 0.5045, + "step": 20930 + }, + { + "epoch": 0.8637038871007675, + "grad_norm": 4.956869427649736, + "learning_rate": 1.4379917299232132e-07, + "loss": 0.523, + "step": 20931 + }, + { + "epoch": 0.8637451514401254, + "grad_norm": 2.2796666331133366, + "learning_rate": 1.4371353188295467e-07, + "loss": 0.4482, + "step": 20932 + }, + { + "epoch": 0.8637864157794833, + "grad_norm": 2.74951889076772, + "learning_rate": 1.4362791500025718e-07, + "loss": 0.5674, + "step": 20933 + }, + { + "epoch": 0.8638276801188413, + "grad_norm": 2.619522360508658, + "learning_rate": 1.4354232234575864e-07, + "loss": 0.5366, + "step": 20934 + }, + { + "epoch": 0.8638689444581992, + "grad_norm": 2.282263704163664, + "learning_rate": 1.4345675392098733e-07, + "loss": 0.4434, + "step": 20935 + }, + { + "epoch": 0.8639102087975572, + "grad_norm": 3.6005642995113956, + "learning_rate": 1.4337120972747188e-07, + "loss": 0.5693, + "step": 20936 + }, + { + "epoch": 0.8639514731369151, + "grad_norm": 3.6851942162494526, + "learning_rate": 1.4328568976674033e-07, + "loss": 0.5301, + "step": 20937 + }, + { + "epoch": 0.863992737476273, + "grad_norm": 3.469210829674269, + "learning_rate": 1.432001940403207e-07, + "loss": 0.5582, + "step": 20938 + }, + { + "epoch": 0.864034001815631, + "grad_norm": 2.924874195759075, + "learning_rate": 1.4311472254973957e-07, + "loss": 0.468, + "step": 20939 + }, + { + "epoch": 0.8640752661549889, + "grad_norm": 2.64864636517981, + "learning_rate": 1.4302927529652386e-07, + "loss": 0.5295, + "step": 20940 + }, + { + "epoch": 0.8641165304943468, + "grad_norm": 3.1818105314514797, + "learning_rate": 1.4294385228220007e-07, + "loss": 0.4837, + "step": 20941 + }, + { + "epoch": 0.8641577948337047, + "grad_norm": 2.6159645630181854, + "learning_rate": 1.428584535082938e-07, + "loss": 0.521, + "step": 20942 + }, + { + "epoch": 0.8641990591730626, + "grad_norm": 8.884815941316514, + "learning_rate": 1.427730789763308e-07, + "loss": 0.5003, + "step": 20943 + }, + { + "epoch": 0.8642403235124205, + "grad_norm": 5.180412640318239, + "learning_rate": 1.426877286878357e-07, + "loss": 0.4785, + "step": 20944 + }, + { + "epoch": 0.8642815878517784, + "grad_norm": 2.8252905638382786, + "learning_rate": 1.4260240264433312e-07, + "loss": 0.5434, + "step": 20945 + }, + { + "epoch": 0.8643228521911365, + "grad_norm": 2.775557579386963, + "learning_rate": 1.4251710084734733e-07, + "loss": 0.5125, + "step": 20946 + }, + { + "epoch": 0.8643641165304944, + "grad_norm": 3.259382417782118, + "learning_rate": 
1.4243182329840228e-07, + "loss": 0.4975, + "step": 20947 + }, + { + "epoch": 0.8644053808698523, + "grad_norm": 1.9641426221258445, + "learning_rate": 1.4234656999902075e-07, + "loss": 0.5296, + "step": 20948 + }, + { + "epoch": 0.8644466452092102, + "grad_norm": 2.1319652675934466, + "learning_rate": 1.4226134095072584e-07, + "loss": 0.4947, + "step": 20949 + }, + { + "epoch": 0.8644879095485681, + "grad_norm": 3.7401317777854453, + "learning_rate": 1.4217613615504004e-07, + "loss": 0.5099, + "step": 20950 + }, + { + "epoch": 0.864529173887926, + "grad_norm": 4.687325689061373, + "learning_rate": 1.4209095561348513e-07, + "loss": 0.5118, + "step": 20951 + }, + { + "epoch": 0.864570438227284, + "grad_norm": 3.420181440072541, + "learning_rate": 1.4200579932758252e-07, + "loss": 0.5011, + "step": 20952 + }, + { + "epoch": 0.8646117025666419, + "grad_norm": 2.8952498062550576, + "learning_rate": 1.4192066729885366e-07, + "loss": 0.463, + "step": 20953 + }, + { + "epoch": 0.8646529669059998, + "grad_norm": 2.3104539017267913, + "learning_rate": 1.4183555952881939e-07, + "loss": 0.4565, + "step": 20954 + }, + { + "epoch": 0.8646942312453577, + "grad_norm": 2.1845324116388083, + "learning_rate": 1.417504760189996e-07, + "loss": 0.548, + "step": 20955 + }, + { + "epoch": 0.8647354955847157, + "grad_norm": 3.7327048146455275, + "learning_rate": 1.4166541677091377e-07, + "loss": 0.5549, + "step": 20956 + }, + { + "epoch": 0.8647767599240737, + "grad_norm": 19.84577861131606, + "learning_rate": 1.4158038178608168e-07, + "loss": 0.5352, + "step": 20957 + }, + { + "epoch": 0.8648180242634316, + "grad_norm": 4.253901635075207, + "learning_rate": 1.4149537106602212e-07, + "loss": 0.5488, + "step": 20958 + }, + { + "epoch": 0.8648592886027895, + "grad_norm": 3.196367392581005, + "learning_rate": 1.414103846122542e-07, + "loss": 0.5366, + "step": 20959 + }, + { + "epoch": 0.8649005529421474, + "grad_norm": 1.9486589290167895, + "learning_rate": 1.4132542242629503e-07, + "loss": 0.5081, + "step": 20960 + }, + { + "epoch": 0.8649418172815053, + "grad_norm": 4.655857175586294, + "learning_rate": 1.412404845096626e-07, + "loss": 0.4932, + "step": 20961 + }, + { + "epoch": 0.8649830816208632, + "grad_norm": 3.5128701598032896, + "learning_rate": 1.4115557086387448e-07, + "loss": 0.5516, + "step": 20962 + }, + { + "epoch": 0.8650243459602212, + "grad_norm": 2.1799857939765634, + "learning_rate": 1.4107068149044734e-07, + "loss": 0.4667, + "step": 20963 + }, + { + "epoch": 0.8650656102995791, + "grad_norm": 3.1275997085416516, + "learning_rate": 1.4098581639089693e-07, + "loss": 0.5229, + "step": 20964 + }, + { + "epoch": 0.865106874638937, + "grad_norm": 2.2825798364960246, + "learning_rate": 1.409009755667397e-07, + "loss": 0.4287, + "step": 20965 + }, + { + "epoch": 0.865148138978295, + "grad_norm": 3.684541836771249, + "learning_rate": 1.4081615901949097e-07, + "loss": 0.5401, + "step": 20966 + }, + { + "epoch": 0.8651894033176529, + "grad_norm": 5.4607895532956565, + "learning_rate": 1.4073136675066616e-07, + "loss": 0.5476, + "step": 20967 + }, + { + "epoch": 0.8652306676570108, + "grad_norm": 2.360758185432982, + "learning_rate": 1.4064659876177942e-07, + "loss": 0.5309, + "step": 20968 + }, + { + "epoch": 0.8652719319963688, + "grad_norm": 5.325208842918102, + "learning_rate": 1.4056185505434472e-07, + "loss": 0.5446, + "step": 20969 + }, + { + "epoch": 0.8653131963357267, + "grad_norm": 1.8181922579870313, + "learning_rate": 1.4047713562987613e-07, + "loss": 0.5425, + "step": 20970 + }, + { + "epoch": 
0.8653544606750846, + "grad_norm": 4.344307942836542, + "learning_rate": 1.4039244048988713e-07, + "loss": 0.4826, + "step": 20971 + }, + { + "epoch": 0.8653957250144425, + "grad_norm": 2.2930540120664404, + "learning_rate": 1.4030776963589005e-07, + "loss": 0.4764, + "step": 20972 + }, + { + "epoch": 0.8654369893538004, + "grad_norm": 3.6066061619206238, + "learning_rate": 1.4022312306939777e-07, + "loss": 0.5315, + "step": 20973 + }, + { + "epoch": 0.8654782536931583, + "grad_norm": 2.479664051452925, + "learning_rate": 1.40138500791922e-07, + "loss": 0.4998, + "step": 20974 + }, + { + "epoch": 0.8655195180325163, + "grad_norm": 7.7885504390820035, + "learning_rate": 1.4005390280497478e-07, + "loss": 0.4856, + "step": 20975 + }, + { + "epoch": 0.8655607823718742, + "grad_norm": 2.7850108414852737, + "learning_rate": 1.3996932911006677e-07, + "loss": 0.5643, + "step": 20976 + }, + { + "epoch": 0.8656020467112322, + "grad_norm": 4.984598656451646, + "learning_rate": 1.398847797087086e-07, + "loss": 0.5176, + "step": 20977 + }, + { + "epoch": 0.8656433110505901, + "grad_norm": 3.286132960825059, + "learning_rate": 1.398002546024109e-07, + "loss": 0.5072, + "step": 20978 + }, + { + "epoch": 0.865684575389948, + "grad_norm": 3.3113263746340906, + "learning_rate": 1.3971575379268364e-07, + "loss": 0.4966, + "step": 20979 + }, + { + "epoch": 0.865725839729306, + "grad_norm": 2.605011644819811, + "learning_rate": 1.396312772810356e-07, + "loss": 0.5068, + "step": 20980 + }, + { + "epoch": 0.8657671040686639, + "grad_norm": 4.135887779033374, + "learning_rate": 1.3954682506897638e-07, + "loss": 0.5093, + "step": 20981 + }, + { + "epoch": 0.8658083684080218, + "grad_norm": 9.433185672533428, + "learning_rate": 1.3946239715801378e-07, + "loss": 0.459, + "step": 20982 + }, + { + "epoch": 0.8658496327473797, + "grad_norm": 2.2123922986782953, + "learning_rate": 1.3937799354965648e-07, + "loss": 0.5338, + "step": 20983 + }, + { + "epoch": 0.8658908970867376, + "grad_norm": 5.9150293642453455, + "learning_rate": 1.392936142454122e-07, + "loss": 0.5323, + "step": 20984 + }, + { + "epoch": 0.8659321614260955, + "grad_norm": 7.055146768413174, + "learning_rate": 1.3920925924678763e-07, + "loss": 0.5222, + "step": 20985 + }, + { + "epoch": 0.8659734257654534, + "grad_norm": 5.165431826218226, + "learning_rate": 1.3912492855528987e-07, + "loss": 0.4636, + "step": 20986 + }, + { + "epoch": 0.8660146901048115, + "grad_norm": 3.0083750010767667, + "learning_rate": 1.3904062217242535e-07, + "loss": 0.4876, + "step": 20987 + }, + { + "epoch": 0.8660559544441694, + "grad_norm": 4.383200196169368, + "learning_rate": 1.389563400997001e-07, + "loss": 0.4787, + "step": 20988 + }, + { + "epoch": 0.8660972187835273, + "grad_norm": 3.3411196656124, + "learning_rate": 1.388720823386192e-07, + "loss": 0.5082, + "step": 20989 + }, + { + "epoch": 0.8661384831228852, + "grad_norm": 2.6423487936878036, + "learning_rate": 1.3878784889068796e-07, + "loss": 0.4578, + "step": 20990 + }, + { + "epoch": 0.8661797474622431, + "grad_norm": 18.152315632459608, + "learning_rate": 1.3870363975741118e-07, + "loss": 0.4885, + "step": 20991 + }, + { + "epoch": 0.866221011801601, + "grad_norm": 3.2665933914211442, + "learning_rate": 1.3861945494029248e-07, + "loss": 0.4704, + "step": 20992 + }, + { + "epoch": 0.866262276140959, + "grad_norm": 3.6781238663592224, + "learning_rate": 1.3853529444083618e-07, + "loss": 0.5367, + "step": 20993 + }, + { + "epoch": 0.8663035404803169, + "grad_norm": 3.4984012789275045, + "learning_rate": 
1.3845115826054555e-07, + "loss": 0.5652, + "step": 20994 + }, + { + "epoch": 0.8663448048196748, + "grad_norm": 6.738280908175969, + "learning_rate": 1.3836704640092308e-07, + "loss": 0.4872, + "step": 20995 + }, + { + "epoch": 0.8663860691590327, + "grad_norm": 2.263871395611605, + "learning_rate": 1.3828295886347153e-07, + "loss": 0.4924, + "step": 20996 + }, + { + "epoch": 0.8664273334983907, + "grad_norm": 30.62316029920745, + "learning_rate": 1.3819889564969274e-07, + "loss": 0.5759, + "step": 20997 + }, + { + "epoch": 0.8664685978377487, + "grad_norm": 3.059683978983672, + "learning_rate": 1.3811485676108832e-07, + "loss": 0.4537, + "step": 20998 + }, + { + "epoch": 0.8665098621771066, + "grad_norm": 2.441913649999488, + "learning_rate": 1.3803084219915956e-07, + "loss": 0.4963, + "step": 20999 + }, + { + "epoch": 0.8665511265164645, + "grad_norm": 9.831785762822168, + "learning_rate": 1.3794685196540728e-07, + "loss": 0.5347, + "step": 21000 + }, + { + "epoch": 0.8665923908558224, + "grad_norm": 3.376729112527761, + "learning_rate": 1.378628860613313e-07, + "loss": 0.5809, + "step": 21001 + }, + { + "epoch": 0.8666336551951803, + "grad_norm": 2.886380495315107, + "learning_rate": 1.377789444884317e-07, + "loss": 0.4957, + "step": 21002 + }, + { + "epoch": 0.8666749195345382, + "grad_norm": 3.7856673982163644, + "learning_rate": 1.3769502724820782e-07, + "loss": 0.4954, + "step": 21003 + }, + { + "epoch": 0.8667161838738962, + "grad_norm": 2.952088221595714, + "learning_rate": 1.376111343421591e-07, + "loss": 0.5447, + "step": 21004 + }, + { + "epoch": 0.8667574482132541, + "grad_norm": 3.482214045889394, + "learning_rate": 1.3752726577178338e-07, + "loss": 0.5027, + "step": 21005 + }, + { + "epoch": 0.866798712552612, + "grad_norm": 2.509096987523739, + "learning_rate": 1.3744342153857908e-07, + "loss": 0.4481, + "step": 21006 + }, + { + "epoch": 0.86683997689197, + "grad_norm": 3.420138869750083, + "learning_rate": 1.3735960164404422e-07, + "loss": 0.5489, + "step": 21007 + }, + { + "epoch": 0.8668812412313279, + "grad_norm": 5.020311382512676, + "learning_rate": 1.3727580608967572e-07, + "loss": 0.503, + "step": 21008 + }, + { + "epoch": 0.8669225055706858, + "grad_norm": 3.0440389621679556, + "learning_rate": 1.3719203487696992e-07, + "loss": 0.5043, + "step": 21009 + }, + { + "epoch": 0.8669637699100438, + "grad_norm": 2.8734281921853055, + "learning_rate": 1.3710828800742358e-07, + "loss": 0.4947, + "step": 21010 + }, + { + "epoch": 0.8670050342494017, + "grad_norm": 4.686887189481499, + "learning_rate": 1.3702456548253273e-07, + "loss": 0.5985, + "step": 21011 + }, + { + "epoch": 0.8670462985887596, + "grad_norm": 2.894379370832824, + "learning_rate": 1.3694086730379313e-07, + "loss": 0.5018, + "step": 21012 + }, + { + "epoch": 0.8670875629281175, + "grad_norm": 2.87025584652673, + "learning_rate": 1.368571934726991e-07, + "loss": 0.548, + "step": 21013 + }, + { + "epoch": 0.8671288272674754, + "grad_norm": 4.179021602023173, + "learning_rate": 1.367735439907456e-07, + "loss": 0.514, + "step": 21014 + }, + { + "epoch": 0.8671700916068333, + "grad_norm": 3.5538586928929257, + "learning_rate": 1.3668991885942695e-07, + "loss": 0.4507, + "step": 21015 + }, + { + "epoch": 0.8672113559461913, + "grad_norm": 2.881175531246417, + "learning_rate": 1.366063180802371e-07, + "loss": 0.4697, + "step": 21016 + }, + { + "epoch": 0.8672526202855493, + "grad_norm": 9.428414097358402, + "learning_rate": 1.3652274165466888e-07, + "loss": 0.5377, + "step": 21017 + }, + { + "epoch": 
0.8672938846249072, + "grad_norm": 7.263057166936519, + "learning_rate": 1.3643918958421542e-07, + "loss": 0.5047, + "step": 21018 + }, + { + "epoch": 0.8673351489642651, + "grad_norm": 3.029540894567083, + "learning_rate": 1.3635566187036902e-07, + "loss": 0.5545, + "step": 21019 + }, + { + "epoch": 0.867376413303623, + "grad_norm": 3.0739990998960547, + "learning_rate": 1.3627215851462215e-07, + "loss": 0.5149, + "step": 21020 + }, + { + "epoch": 0.8674176776429809, + "grad_norm": 2.70655002223808, + "learning_rate": 1.3618867951846598e-07, + "loss": 0.5291, + "step": 21021 + }, + { + "epoch": 0.8674589419823389, + "grad_norm": 4.2543590979012365, + "learning_rate": 1.361052248833916e-07, + "loss": 0.4627, + "step": 21022 + }, + { + "epoch": 0.8675002063216968, + "grad_norm": 4.365764947832004, + "learning_rate": 1.360217946108897e-07, + "loss": 0.5402, + "step": 21023 + }, + { + "epoch": 0.8675414706610547, + "grad_norm": 2.8894722156776607, + "learning_rate": 1.3593838870245074e-07, + "loss": 0.5651, + "step": 21024 + }, + { + "epoch": 0.8675827350004126, + "grad_norm": 2.865195801516162, + "learning_rate": 1.3585500715956483e-07, + "loss": 0.5119, + "step": 21025 + }, + { + "epoch": 0.8676239993397705, + "grad_norm": 2.597881085708701, + "learning_rate": 1.3577164998372083e-07, + "loss": 0.5653, + "step": 21026 + }, + { + "epoch": 0.8676652636791286, + "grad_norm": 6.216318649304519, + "learning_rate": 1.3568831717640785e-07, + "loss": 0.451, + "step": 21027 + }, + { + "epoch": 0.8677065280184865, + "grad_norm": 6.248867450851052, + "learning_rate": 1.3560500873911487e-07, + "loss": 0.5592, + "step": 21028 + }, + { + "epoch": 0.8677477923578444, + "grad_norm": 4.190967841026302, + "learning_rate": 1.355217246733292e-07, + "loss": 0.4786, + "step": 21029 + }, + { + "epoch": 0.8677890566972023, + "grad_norm": 2.7853578037946893, + "learning_rate": 1.3543846498053896e-07, + "loss": 0.5483, + "step": 21030 + }, + { + "epoch": 0.8678303210365602, + "grad_norm": 3.971827418413819, + "learning_rate": 1.3535522966223153e-07, + "loss": 0.4547, + "step": 21031 + }, + { + "epoch": 0.8678715853759181, + "grad_norm": 2.755595014739236, + "learning_rate": 1.3527201871989365e-07, + "loss": 0.5263, + "step": 21032 + }, + { + "epoch": 0.867912849715276, + "grad_norm": 4.7842085900513425, + "learning_rate": 1.3518883215501148e-07, + "loss": 0.5595, + "step": 21033 + }, + { + "epoch": 0.867954114054634, + "grad_norm": 2.392259317113904, + "learning_rate": 1.3510566996907087e-07, + "loss": 0.5266, + "step": 21034 + }, + { + "epoch": 0.8679953783939919, + "grad_norm": 5.103801482612625, + "learning_rate": 1.3502253216355743e-07, + "loss": 0.5022, + "step": 21035 + }, + { + "epoch": 0.8680366427333498, + "grad_norm": 3.7682264877777807, + "learning_rate": 1.3493941873995612e-07, + "loss": 0.4733, + "step": 21036 + }, + { + "epoch": 0.8680779070727077, + "grad_norm": 2.2890534370834614, + "learning_rate": 1.3485632969975197e-07, + "loss": 0.5032, + "step": 21037 + }, + { + "epoch": 0.8681191714120657, + "grad_norm": 4.153538467081099, + "learning_rate": 1.3477326504442843e-07, + "loss": 0.4753, + "step": 21038 + }, + { + "epoch": 0.8681604357514237, + "grad_norm": 5.513358757500455, + "learning_rate": 1.3469022477546982e-07, + "loss": 0.5143, + "step": 21039 + }, + { + "epoch": 0.8682017000907816, + "grad_norm": 18.80987977320199, + "learning_rate": 1.346072088943593e-07, + "loss": 0.482, + "step": 21040 + }, + { + "epoch": 0.8682429644301395, + "grad_norm": 2.4501674232933612, + "learning_rate": 
1.3452421740257998e-07, + "loss": 0.5232, + "step": 21041 + }, + { + "epoch": 0.8682842287694974, + "grad_norm": 3.185089095943071, + "learning_rate": 1.344412503016137e-07, + "loss": 0.4634, + "step": 21042 + }, + { + "epoch": 0.8683254931088553, + "grad_norm": 7.4574062306692115, + "learning_rate": 1.3435830759294278e-07, + "loss": 0.5003, + "step": 21043 + }, + { + "epoch": 0.8683667574482132, + "grad_norm": 3.2933336673680906, + "learning_rate": 1.3427538927804882e-07, + "loss": 0.5513, + "step": 21044 + }, + { + "epoch": 0.8684080217875711, + "grad_norm": 2.028332621420328, + "learning_rate": 1.3419249535841321e-07, + "loss": 0.4752, + "step": 21045 + }, + { + "epoch": 0.8684492861269291, + "grad_norm": 2.598192831596582, + "learning_rate": 1.341096258355164e-07, + "loss": 0.5084, + "step": 21046 + }, + { + "epoch": 0.868490550466287, + "grad_norm": 2.4927627626948197, + "learning_rate": 1.3402678071083818e-07, + "loss": 0.4983, + "step": 21047 + }, + { + "epoch": 0.868531814805645, + "grad_norm": 4.297238807710923, + "learning_rate": 1.339439599858589e-07, + "loss": 0.4687, + "step": 21048 + }, + { + "epoch": 0.8685730791450029, + "grad_norm": 2.1959746088998586, + "learning_rate": 1.338611636620582e-07, + "loss": 0.499, + "step": 21049 + }, + { + "epoch": 0.8686143434843608, + "grad_norm": 3.869008583710688, + "learning_rate": 1.337783917409142e-07, + "loss": 0.5162, + "step": 21050 + }, + { + "epoch": 0.8686556078237188, + "grad_norm": 2.5020790650039255, + "learning_rate": 1.3369564422390613e-07, + "loss": 0.5058, + "step": 21051 + }, + { + "epoch": 0.8686968721630767, + "grad_norm": 5.79973454880521, + "learning_rate": 1.3361292111251173e-07, + "loss": 0.5389, + "step": 21052 + }, + { + "epoch": 0.8687381365024346, + "grad_norm": 6.520223664578515, + "learning_rate": 1.3353022240820883e-07, + "loss": 0.5255, + "step": 21053 + }, + { + "epoch": 0.8687794008417925, + "grad_norm": 2.7394352247777523, + "learning_rate": 1.3344754811247445e-07, + "loss": 0.5403, + "step": 21054 + }, + { + "epoch": 0.8688206651811504, + "grad_norm": 3.7880598984344607, + "learning_rate": 1.3336489822678537e-07, + "loss": 0.513, + "step": 21055 + }, + { + "epoch": 0.8688619295205083, + "grad_norm": 10.793899531821225, + "learning_rate": 1.3328227275261812e-07, + "loss": 0.4839, + "step": 21056 + }, + { + "epoch": 0.8689031938598663, + "grad_norm": 3.2328070543839513, + "learning_rate": 1.3319967169144864e-07, + "loss": 0.4895, + "step": 21057 + }, + { + "epoch": 0.8689444581992243, + "grad_norm": 2.1269829683555233, + "learning_rate": 1.3311709504475213e-07, + "loss": 0.529, + "step": 21058 + }, + { + "epoch": 0.8689857225385822, + "grad_norm": 3.6282192501543493, + "learning_rate": 1.330345428140035e-07, + "loss": 0.4957, + "step": 21059 + }, + { + "epoch": 0.8690269868779401, + "grad_norm": 2.757173301888143, + "learning_rate": 1.3295201500067798e-07, + "loss": 0.5239, + "step": 21060 + }, + { + "epoch": 0.869068251217298, + "grad_norm": 4.103458346017083, + "learning_rate": 1.3286951160624906e-07, + "loss": 0.5416, + "step": 21061 + }, + { + "epoch": 0.8691095155566559, + "grad_norm": 2.420075592455289, + "learning_rate": 1.3278703263219084e-07, + "loss": 0.5034, + "step": 21062 + }, + { + "epoch": 0.8691507798960139, + "grad_norm": 4.743969828494339, + "learning_rate": 1.3270457807997632e-07, + "loss": 0.4902, + "step": 21063 + }, + { + "epoch": 0.8691920442353718, + "grad_norm": 6.3892438963399645, + "learning_rate": 1.3262214795107836e-07, + "loss": 0.5116, + "step": 21064 + }, + { + "epoch": 
0.8692333085747297, + "grad_norm": 2.774756247001532, + "learning_rate": 1.3253974224696957e-07, + "loss": 0.485, + "step": 21065 + }, + { + "epoch": 0.8692745729140876, + "grad_norm": 3.3378058824526766, + "learning_rate": 1.32457360969122e-07, + "loss": 0.5707, + "step": 21066 + }, + { + "epoch": 0.8693158372534455, + "grad_norm": 11.000140772152237, + "learning_rate": 1.323750041190069e-07, + "loss": 0.5076, + "step": 21067 + }, + { + "epoch": 0.8693571015928035, + "grad_norm": 2.2384495601445495, + "learning_rate": 1.3229267169809544e-07, + "loss": 0.5461, + "step": 21068 + }, + { + "epoch": 0.8693983659321615, + "grad_norm": 3.529723433518829, + "learning_rate": 1.3221036370785867e-07, + "loss": 0.4605, + "step": 21069 + }, + { + "epoch": 0.8694396302715194, + "grad_norm": 2.6931060206361668, + "learning_rate": 1.3212808014976617e-07, + "loss": 0.5285, + "step": 21070 + }, + { + "epoch": 0.8694808946108773, + "grad_norm": 2.6387915179108656, + "learning_rate": 1.32045821025288e-07, + "loss": 0.5022, + "step": 21071 + }, + { + "epoch": 0.8695221589502352, + "grad_norm": 5.778903283164468, + "learning_rate": 1.3196358633589363e-07, + "loss": 0.5452, + "step": 21072 + }, + { + "epoch": 0.8695634232895931, + "grad_norm": 2.8263087455589386, + "learning_rate": 1.3188137608305217e-07, + "loss": 0.4653, + "step": 21073 + }, + { + "epoch": 0.869604687628951, + "grad_norm": 7.46763502096549, + "learning_rate": 1.3179919026823183e-07, + "loss": 0.393, + "step": 21074 + }, + { + "epoch": 0.869645951968309, + "grad_norm": 3.9606867322913777, + "learning_rate": 1.3171702889290043e-07, + "loss": 0.5503, + "step": 21075 + }, + { + "epoch": 0.8696872163076669, + "grad_norm": 4.292846213075659, + "learning_rate": 1.316348919585258e-07, + "loss": 0.5524, + "step": 21076 + }, + { + "epoch": 0.8697284806470248, + "grad_norm": 2.4234590504154334, + "learning_rate": 1.3155277946657506e-07, + "loss": 0.4694, + "step": 21077 + }, + { + "epoch": 0.8697697449863828, + "grad_norm": 2.796691173447211, + "learning_rate": 1.3147069141851526e-07, + "loss": 0.5294, + "step": 21078 + }, + { + "epoch": 0.8698110093257407, + "grad_norm": 3.5708951794471893, + "learning_rate": 1.3138862781581235e-07, + "loss": 0.5337, + "step": 21079 + }, + { + "epoch": 0.8698522736650987, + "grad_norm": 3.879293532466807, + "learning_rate": 1.3130658865993217e-07, + "loss": 0.5124, + "step": 21080 + }, + { + "epoch": 0.8698935380044566, + "grad_norm": 2.479541487103945, + "learning_rate": 1.312245739523402e-07, + "loss": 0.5304, + "step": 21081 + }, + { + "epoch": 0.8699348023438145, + "grad_norm": 3.542168574754551, + "learning_rate": 1.3114258369450182e-07, + "loss": 0.4948, + "step": 21082 + }, + { + "epoch": 0.8699760666831724, + "grad_norm": 2.685217387085202, + "learning_rate": 1.3106061788788116e-07, + "loss": 0.5566, + "step": 21083 + }, + { + "epoch": 0.8700173310225303, + "grad_norm": 2.6200282302147615, + "learning_rate": 1.309786765339422e-07, + "loss": 0.5026, + "step": 21084 + }, + { + "epoch": 0.8700585953618882, + "grad_norm": 4.5644951498796, + "learning_rate": 1.3089675963414893e-07, + "loss": 0.6018, + "step": 21085 + }, + { + "epoch": 0.8700998597012461, + "grad_norm": 3.652164711642691, + "learning_rate": 1.3081486718996505e-07, + "loss": 0.5222, + "step": 21086 + }, + { + "epoch": 0.8701411240406041, + "grad_norm": 2.442054381521746, + "learning_rate": 1.307329992028522e-07, + "loss": 0.4686, + "step": 21087 + }, + { + "epoch": 0.8701823883799621, + "grad_norm": 2.2129915435546015, + "learning_rate": 
1.3065115567427338e-07, + "loss": 0.5191, + "step": 21088 + }, + { + "epoch": 0.87022365271932, + "grad_norm": 2.999843947083522, + "learning_rate": 1.3056933660569059e-07, + "loss": 0.4905, + "step": 21089 + }, + { + "epoch": 0.8702649170586779, + "grad_norm": 2.668986369712992, + "learning_rate": 1.304875419985655e-07, + "loss": 0.5111, + "step": 21090 + }, + { + "epoch": 0.8703061813980358, + "grad_norm": 5.389466346894567, + "learning_rate": 1.304057718543586e-07, + "loss": 0.513, + "step": 21091 + }, + { + "epoch": 0.8703474457373938, + "grad_norm": 3.7715285394061833, + "learning_rate": 1.303240261745307e-07, + "loss": 0.4751, + "step": 21092 + }, + { + "epoch": 0.8703887100767517, + "grad_norm": 4.2561271818377895, + "learning_rate": 1.3024230496054235e-07, + "loss": 0.4975, + "step": 21093 + }, + { + "epoch": 0.8704299744161096, + "grad_norm": 5.006003894526652, + "learning_rate": 1.3016060821385317e-07, + "loss": 0.5143, + "step": 21094 + }, + { + "epoch": 0.8704712387554675, + "grad_norm": 3.04390978641766, + "learning_rate": 1.3007893593592203e-07, + "loss": 0.4971, + "step": 21095 + }, + { + "epoch": 0.8705125030948254, + "grad_norm": 4.85568429330139, + "learning_rate": 1.299972881282082e-07, + "loss": 0.511, + "step": 21096 + }, + { + "epoch": 0.8705537674341833, + "grad_norm": 4.615260069462925, + "learning_rate": 1.2991566479216992e-07, + "loss": 0.4964, + "step": 21097 + }, + { + "epoch": 0.8705950317735413, + "grad_norm": 5.820652218970063, + "learning_rate": 1.2983406592926568e-07, + "loss": 0.4646, + "step": 21098 + }, + { + "epoch": 0.8706362961128993, + "grad_norm": 27.303469639198862, + "learning_rate": 1.2975249154095258e-07, + "loss": 0.5054, + "step": 21099 + }, + { + "epoch": 0.8706775604522572, + "grad_norm": 2.54305517186336, + "learning_rate": 1.2967094162868754e-07, + "loss": 0.4582, + "step": 21100 + }, + { + "epoch": 0.8707188247916151, + "grad_norm": 5.124262363151461, + "learning_rate": 1.295894161939275e-07, + "loss": 0.4874, + "step": 21101 + }, + { + "epoch": 0.870760089130973, + "grad_norm": 2.821461351628931, + "learning_rate": 1.2950791523812881e-07, + "loss": 0.5235, + "step": 21102 + }, + { + "epoch": 0.8708013534703309, + "grad_norm": 3.616700140886801, + "learning_rate": 1.2942643876274752e-07, + "loss": 0.5248, + "step": 21103 + }, + { + "epoch": 0.8708426178096889, + "grad_norm": 5.109633602481886, + "learning_rate": 1.2934498676923824e-07, + "loss": 0.5059, + "step": 21104 + }, + { + "epoch": 0.8708838821490468, + "grad_norm": 3.5939696259514724, + "learning_rate": 1.2926355925905648e-07, + "loss": 0.512, + "step": 21105 + }, + { + "epoch": 0.8709251464884047, + "grad_norm": 11.385693181931176, + "learning_rate": 1.2918215623365675e-07, + "loss": 0.5277, + "step": 21106 + }, + { + "epoch": 0.8709664108277626, + "grad_norm": 3.6057484821871424, + "learning_rate": 1.291007776944929e-07, + "loss": 0.5745, + "step": 21107 + }, + { + "epoch": 0.8710076751671205, + "grad_norm": 6.104878704065949, + "learning_rate": 1.2901942364301844e-07, + "loss": 0.5519, + "step": 21108 + }, + { + "epoch": 0.8710489395064785, + "grad_norm": 2.4194658132622635, + "learning_rate": 1.2893809408068686e-07, + "loss": 0.4803, + "step": 21109 + }, + { + "epoch": 0.8710902038458365, + "grad_norm": 4.426384963409309, + "learning_rate": 1.2885678900895097e-07, + "loss": 0.5441, + "step": 21110 + }, + { + "epoch": 0.8711314681851944, + "grad_norm": 10.98409528972439, + "learning_rate": 1.2877550842926267e-07, + "loss": 0.5197, + "step": 21111 + }, + { + "epoch": 
0.8711727325245523, + "grad_norm": 3.109386595231179, + "learning_rate": 1.286942523430744e-07, + "loss": 0.5187, + "step": 21112 + }, + { + "epoch": 0.8712139968639102, + "grad_norm": 2.432882560776985, + "learning_rate": 1.286130207518369e-07, + "loss": 0.4919, + "step": 21113 + }, + { + "epoch": 0.8712552612032681, + "grad_norm": 4.5774724796763095, + "learning_rate": 1.2853181365700162e-07, + "loss": 0.5476, + "step": 21114 + }, + { + "epoch": 0.871296525542626, + "grad_norm": 3.2265125749179906, + "learning_rate": 1.2845063106001925e-07, + "loss": 0.5241, + "step": 21115 + }, + { + "epoch": 0.871337789881984, + "grad_norm": 1.99349664221579, + "learning_rate": 1.2836947296233948e-07, + "loss": 0.5668, + "step": 21116 + }, + { + "epoch": 0.8713790542213419, + "grad_norm": 2.3792089440660082, + "learning_rate": 1.2828833936541213e-07, + "loss": 0.505, + "step": 21117 + }, + { + "epoch": 0.8714203185606998, + "grad_norm": 3.1456946387644718, + "learning_rate": 1.2820723027068655e-07, + "loss": 0.4958, + "step": 21118 + }, + { + "epoch": 0.8714615829000578, + "grad_norm": 7.446664200131457, + "learning_rate": 1.281261456796119e-07, + "loss": 0.4885, + "step": 21119 + }, + { + "epoch": 0.8715028472394157, + "grad_norm": 5.16541862020428, + "learning_rate": 1.2804508559363586e-07, + "loss": 0.5257, + "step": 21120 + }, + { + "epoch": 0.8715441115787737, + "grad_norm": 4.480705367051574, + "learning_rate": 1.2796405001420664e-07, + "loss": 0.567, + "step": 21121 + }, + { + "epoch": 0.8715853759181316, + "grad_norm": 2.3139671643961326, + "learning_rate": 1.2788303894277188e-07, + "loss": 0.5218, + "step": 21122 + }, + { + "epoch": 0.8716266402574895, + "grad_norm": 3.868679604268512, + "learning_rate": 1.2780205238077874e-07, + "loss": 0.5373, + "step": 21123 + }, + { + "epoch": 0.8716679045968474, + "grad_norm": 5.121862137829505, + "learning_rate": 1.2772109032967345e-07, + "loss": 0.5342, + "step": 21124 + }, + { + "epoch": 0.8717091689362053, + "grad_norm": 8.387822512583877, + "learning_rate": 1.276401527909028e-07, + "loss": 0.4641, + "step": 21125 + }, + { + "epoch": 0.8717504332755632, + "grad_norm": 2.5522301888430134, + "learning_rate": 1.2755923976591165e-07, + "loss": 0.5252, + "step": 21126 + }, + { + "epoch": 0.8717916976149211, + "grad_norm": 5.5323577867976725, + "learning_rate": 1.274783512561462e-07, + "loss": 0.5737, + "step": 21127 + }, + { + "epoch": 0.8718329619542791, + "grad_norm": 8.667267853102377, + "learning_rate": 1.2739748726305061e-07, + "loss": 0.5454, + "step": 21128 + }, + { + "epoch": 0.8718742262936371, + "grad_norm": 8.234014224497168, + "learning_rate": 1.2731664778806957e-07, + "loss": 0.5091, + "step": 21129 + }, + { + "epoch": 0.871915490632995, + "grad_norm": 3.510289407978094, + "learning_rate": 1.2723583283264722e-07, + "loss": 0.5205, + "step": 21130 + }, + { + "epoch": 0.8719567549723529, + "grad_norm": 2.691984059362679, + "learning_rate": 1.271550423982273e-07, + "loss": 0.5516, + "step": 21131 + }, + { + "epoch": 0.8719980193117108, + "grad_norm": 3.274827911979122, + "learning_rate": 1.2707427648625225e-07, + "loss": 0.4596, + "step": 21132 + }, + { + "epoch": 0.8720392836510688, + "grad_norm": 4.864075405654265, + "learning_rate": 1.269935350981653e-07, + "loss": 0.522, + "step": 21133 + }, + { + "epoch": 0.8720805479904267, + "grad_norm": 4.785029182022394, + "learning_rate": 1.2691281823540845e-07, + "loss": 0.5059, + "step": 21134 + }, + { + "epoch": 0.8721218123297846, + "grad_norm": 2.208329720900197, + "learning_rate": 
1.2683212589942388e-07, + "loss": 0.5079, + "step": 21135 + }, + { + "epoch": 0.8721630766691425, + "grad_norm": 3.928612114594362, + "learning_rate": 1.2675145809165245e-07, + "loss": 0.4999, + "step": 21136 + }, + { + "epoch": 0.8722043410085004, + "grad_norm": 2.3987072774163063, + "learning_rate": 1.266708148135353e-07, + "loss": 0.5164, + "step": 21137 + }, + { + "epoch": 0.8722456053478583, + "grad_norm": 3.4958222985906464, + "learning_rate": 1.2659019606651334e-07, + "loss": 0.5267, + "step": 21138 + }, + { + "epoch": 0.8722868696872164, + "grad_norm": 6.737987254003549, + "learning_rate": 1.2650960185202586e-07, + "loss": 0.5433, + "step": 21139 + }, + { + "epoch": 0.8723281340265743, + "grad_norm": 7.6844318538291585, + "learning_rate": 1.264290321715131e-07, + "loss": 0.5452, + "step": 21140 + }, + { + "epoch": 0.8723693983659322, + "grad_norm": 3.915609528212349, + "learning_rate": 1.2634848702641367e-07, + "loss": 0.4871, + "step": 21141 + }, + { + "epoch": 0.8724106627052901, + "grad_norm": 3.11286368585028, + "learning_rate": 1.2626796641816663e-07, + "loss": 0.5712, + "step": 21142 + }, + { + "epoch": 0.872451927044648, + "grad_norm": 2.1134374779408414, + "learning_rate": 1.2618747034821032e-07, + "loss": 0.4725, + "step": 21143 + }, + { + "epoch": 0.8724931913840059, + "grad_norm": 16.387908831468803, + "learning_rate": 1.2610699881798276e-07, + "loss": 0.5538, + "step": 21144 + }, + { + "epoch": 0.8725344557233639, + "grad_norm": 3.930882890468045, + "learning_rate": 1.2602655182892082e-07, + "loss": 0.4996, + "step": 21145 + }, + { + "epoch": 0.8725757200627218, + "grad_norm": 2.427358598975581, + "learning_rate": 1.2594612938246198e-07, + "loss": 0.5162, + "step": 21146 + }, + { + "epoch": 0.8726169844020797, + "grad_norm": 3.67911779026009, + "learning_rate": 1.2586573148004277e-07, + "loss": 0.505, + "step": 21147 + }, + { + "epoch": 0.8726582487414376, + "grad_norm": 24.534021146900727, + "learning_rate": 1.257853581230989e-07, + "loss": 0.5812, + "step": 21148 + }, + { + "epoch": 0.8726995130807956, + "grad_norm": 3.4579559368840496, + "learning_rate": 1.2570500931306632e-07, + "loss": 0.5002, + "step": 21149 + }, + { + "epoch": 0.8727407774201535, + "grad_norm": 8.699335065449096, + "learning_rate": 1.2562468505138013e-07, + "loss": 0.5017, + "step": 21150 + }, + { + "epoch": 0.8727820417595115, + "grad_norm": 2.725352891039995, + "learning_rate": 1.2554438533947566e-07, + "loss": 0.4865, + "step": 21151 + }, + { + "epoch": 0.8728233060988694, + "grad_norm": 7.471753809928758, + "learning_rate": 1.2546411017878673e-07, + "loss": 0.4767, + "step": 21152 + }, + { + "epoch": 0.8728645704382273, + "grad_norm": 4.7752643135619985, + "learning_rate": 1.2538385957074705e-07, + "loss": 0.4941, + "step": 21153 + }, + { + "epoch": 0.8729058347775852, + "grad_norm": 2.1856387597444265, + "learning_rate": 1.2530363351679048e-07, + "loss": 0.5712, + "step": 21154 + }, + { + "epoch": 0.8729470991169431, + "grad_norm": 4.589211770101268, + "learning_rate": 1.2522343201835e-07, + "loss": 0.5503, + "step": 21155 + }, + { + "epoch": 0.872988363456301, + "grad_norm": 7.914338339873594, + "learning_rate": 1.2514325507685836e-07, + "loss": 0.4976, + "step": 21156 + }, + { + "epoch": 0.873029627795659, + "grad_norm": 2.59639197786674, + "learning_rate": 1.2506310269374737e-07, + "loss": 0.4762, + "step": 21157 + }, + { + "epoch": 0.8730708921350169, + "grad_norm": 4.277936926812934, + "learning_rate": 1.249829748704489e-07, + "loss": 0.5466, + "step": 21158 + }, + { + "epoch": 
0.8731121564743748, + "grad_norm": 6.066138418362205, + "learning_rate": 1.249028716083943e-07, + "loss": 0.5183, + "step": 21159 + }, + { + "epoch": 0.8731534208137328, + "grad_norm": 3.3383426281931716, + "learning_rate": 1.248227929090146e-07, + "loss": 0.4497, + "step": 21160 + }, + { + "epoch": 0.8731946851530907, + "grad_norm": 3.7565124679972053, + "learning_rate": 1.2474273877373966e-07, + "loss": 0.5008, + "step": 21161 + }, + { + "epoch": 0.8732359494924486, + "grad_norm": 2.8622191114848907, + "learning_rate": 1.246627092039998e-07, + "loss": 0.548, + "step": 21162 + }, + { + "epoch": 0.8732772138318066, + "grad_norm": 4.592884804619067, + "learning_rate": 1.245827042012246e-07, + "loss": 0.53, + "step": 21163 + }, + { + "epoch": 0.8733184781711645, + "grad_norm": 8.215711736168423, + "learning_rate": 1.2450272376684335e-07, + "loss": 0.496, + "step": 21164 + }, + { + "epoch": 0.8733597425105224, + "grad_norm": 2.267317334794594, + "learning_rate": 1.244227679022843e-07, + "loss": 0.4715, + "step": 21165 + }, + { + "epoch": 0.8734010068498803, + "grad_norm": 2.808805949408183, + "learning_rate": 1.243428366089756e-07, + "loss": 0.4774, + "step": 21166 + }, + { + "epoch": 0.8734422711892382, + "grad_norm": 9.00593410231957, + "learning_rate": 1.2426292988834514e-07, + "loss": 0.5347, + "step": 21167 + }, + { + "epoch": 0.8734835355285961, + "grad_norm": 2.483336413661991, + "learning_rate": 1.2418304774182077e-07, + "loss": 0.5209, + "step": 21168 + }, + { + "epoch": 0.8735247998679541, + "grad_norm": 3.950944953974098, + "learning_rate": 1.241031901708285e-07, + "loss": 0.5227, + "step": 21169 + }, + { + "epoch": 0.8735660642073121, + "grad_norm": 4.837003541029484, + "learning_rate": 1.240233571767952e-07, + "loss": 0.5488, + "step": 21170 + }, + { + "epoch": 0.87360732854667, + "grad_norm": 2.9861233756242216, + "learning_rate": 1.2394354876114705e-07, + "loss": 0.5453, + "step": 21171 + }, + { + "epoch": 0.8736485928860279, + "grad_norm": 2.311406375383293, + "learning_rate": 1.2386376492530959e-07, + "loss": 0.4656, + "step": 21172 + }, + { + "epoch": 0.8736898572253858, + "grad_norm": 3.3781908177887847, + "learning_rate": 1.237840056707077e-07, + "loss": 0.5367, + "step": 21173 + }, + { + "epoch": 0.8737311215647438, + "grad_norm": 3.821896812476973, + "learning_rate": 1.237042709987662e-07, + "loss": 0.4792, + "step": 21174 + }, + { + "epoch": 0.8737723859041017, + "grad_norm": 7.194834538094699, + "learning_rate": 1.2362456091090928e-07, + "loss": 0.5205, + "step": 21175 + }, + { + "epoch": 0.8738136502434596, + "grad_norm": 2.8105749317888513, + "learning_rate": 1.235448754085612e-07, + "loss": 0.5457, + "step": 21176 + }, + { + "epoch": 0.8738549145828175, + "grad_norm": 12.642743159536062, + "learning_rate": 1.234652144931449e-07, + "loss": 0.5255, + "step": 21177 + }, + { + "epoch": 0.8738961789221754, + "grad_norm": 2.987585692144815, + "learning_rate": 1.233855781660835e-07, + "loss": 0.4913, + "step": 21178 + }, + { + "epoch": 0.8739374432615333, + "grad_norm": 3.2512341314173523, + "learning_rate": 1.2330596642879927e-07, + "loss": 0.5036, + "step": 21179 + }, + { + "epoch": 0.8739787076008914, + "grad_norm": 2.7110233463807787, + "learning_rate": 1.2322637928271447e-07, + "loss": 0.5561, + "step": 21180 + }, + { + "epoch": 0.8740199719402493, + "grad_norm": 4.730356764002029, + "learning_rate": 1.2314681672925077e-07, + "loss": 0.4692, + "step": 21181 + }, + { + "epoch": 0.8740612362796072, + "grad_norm": 2.4995360702941083, + "learning_rate": 
1.230672787698292e-07, + "loss": 0.5065, + "step": 21182 + }, + { + "epoch": 0.8741025006189651, + "grad_norm": 6.020569083609466, + "learning_rate": 1.2298776540587048e-07, + "loss": 0.5357, + "step": 21183 + }, + { + "epoch": 0.874143764958323, + "grad_norm": 6.325153716728668, + "learning_rate": 1.2290827663879546e-07, + "loss": 0.4571, + "step": 21184 + }, + { + "epoch": 0.8741850292976809, + "grad_norm": 3.6993139673317064, + "learning_rate": 1.2282881247002314e-07, + "loss": 0.4765, + "step": 21185 + }, + { + "epoch": 0.8742262936370389, + "grad_norm": 2.3888234718912957, + "learning_rate": 1.2274937290097344e-07, + "loss": 0.5317, + "step": 21186 + }, + { + "epoch": 0.8742675579763968, + "grad_norm": 2.754754432512818, + "learning_rate": 1.2266995793306534e-07, + "loss": 0.5449, + "step": 21187 + }, + { + "epoch": 0.8743088223157547, + "grad_norm": 2.1583381045811647, + "learning_rate": 1.225905675677176e-07, + "loss": 0.5126, + "step": 21188 + }, + { + "epoch": 0.8743500866551126, + "grad_norm": 6.148606012660194, + "learning_rate": 1.2251120180634767e-07, + "loss": 0.5233, + "step": 21189 + }, + { + "epoch": 0.8743913509944706, + "grad_norm": 2.967040432247561, + "learning_rate": 1.2243186065037383e-07, + "loss": 0.5683, + "step": 21190 + }, + { + "epoch": 0.8744326153338285, + "grad_norm": 2.5589920273973985, + "learning_rate": 1.223525441012132e-07, + "loss": 0.4886, + "step": 21191 + }, + { + "epoch": 0.8744738796731865, + "grad_norm": 3.1473021656580307, + "learning_rate": 1.2227325216028223e-07, + "loss": 0.5055, + "step": 21192 + }, + { + "epoch": 0.8745151440125444, + "grad_norm": 1.820894511700207, + "learning_rate": 1.221939848289979e-07, + "loss": 0.4689, + "step": 21193 + }, + { + "epoch": 0.8745564083519023, + "grad_norm": 3.286173795801798, + "learning_rate": 1.2211474210877544e-07, + "loss": 0.4627, + "step": 21194 + }, + { + "epoch": 0.8745976726912602, + "grad_norm": 11.10566574417767, + "learning_rate": 1.220355240010305e-07, + "loss": 0.5539, + "step": 21195 + }, + { + "epoch": 0.8746389370306181, + "grad_norm": 4.4128151859921365, + "learning_rate": 1.2195633050717836e-07, + "loss": 0.5507, + "step": 21196 + }, + { + "epoch": 0.874680201369976, + "grad_norm": 3.16882698998016, + "learning_rate": 1.2187716162863382e-07, + "loss": 0.4565, + "step": 21197 + }, + { + "epoch": 0.874721465709334, + "grad_norm": 3.1486885338735586, + "learning_rate": 1.217980173668103e-07, + "loss": 0.477, + "step": 21198 + }, + { + "epoch": 0.8747627300486919, + "grad_norm": 2.7346982901597867, + "learning_rate": 1.2171889772312196e-07, + "loss": 0.5435, + "step": 21199 + }, + { + "epoch": 0.8748039943880499, + "grad_norm": 2.4139569632128746, + "learning_rate": 1.2163980269898217e-07, + "loss": 0.4628, + "step": 21200 + }, + { + "epoch": 0.8748452587274078, + "grad_norm": 3.3215002754328067, + "learning_rate": 1.2156073229580383e-07, + "loss": 0.4528, + "step": 21201 + }, + { + "epoch": 0.8748865230667657, + "grad_norm": 3.3474778643697345, + "learning_rate": 1.214816865149988e-07, + "loss": 0.5507, + "step": 21202 + }, + { + "epoch": 0.8749277874061236, + "grad_norm": 4.2132749110582415, + "learning_rate": 1.2140266535797944e-07, + "loss": 0.5115, + "step": 21203 + }, + { + "epoch": 0.8749690517454816, + "grad_norm": 3.112527916111436, + "learning_rate": 1.213236688261573e-07, + "loss": 0.5435, + "step": 21204 + }, + { + "epoch": 0.8750103160848395, + "grad_norm": 2.833727400138702, + "learning_rate": 1.212446969209436e-07, + "loss": 0.4843, + "step": 21205 + }, + { + "epoch": 
0.8750515804241974, + "grad_norm": 6.573090190852049, + "learning_rate": 1.2116574964374833e-07, + "loss": 0.5548, + "step": 21206 + }, + { + "epoch": 0.8750928447635553, + "grad_norm": 2.6679625115787338, + "learning_rate": 1.2108682699598206e-07, + "loss": 0.5548, + "step": 21207 + }, + { + "epoch": 0.8751341091029132, + "grad_norm": 3.5084542099537313, + "learning_rate": 1.2100792897905465e-07, + "loss": 0.5442, + "step": 21208 + }, + { + "epoch": 0.8751753734422711, + "grad_norm": 20.90432983593213, + "learning_rate": 1.2092905559437567e-07, + "loss": 0.5016, + "step": 21209 + }, + { + "epoch": 0.8752166377816292, + "grad_norm": 2.126629628370326, + "learning_rate": 1.2085020684335328e-07, + "loss": 0.495, + "step": 21210 + }, + { + "epoch": 0.8752579021209871, + "grad_norm": 3.788005187937855, + "learning_rate": 1.2077138272739624e-07, + "loss": 0.442, + "step": 21211 + }, + { + "epoch": 0.875299166460345, + "grad_norm": 5.096003282661707, + "learning_rate": 1.206925832479127e-07, + "loss": 0.5656, + "step": 21212 + }, + { + "epoch": 0.8753404307997029, + "grad_norm": 3.8421812472143757, + "learning_rate": 1.2061380840631042e-07, + "loss": 0.4969, + "step": 21213 + }, + { + "epoch": 0.8753816951390608, + "grad_norm": 2.685546624407244, + "learning_rate": 1.205350582039959e-07, + "loss": 0.5649, + "step": 21214 + }, + { + "epoch": 0.8754229594784187, + "grad_norm": 8.089616216880268, + "learning_rate": 1.204563326423762e-07, + "loss": 0.5194, + "step": 21215 + }, + { + "epoch": 0.8754642238177767, + "grad_norm": 1.877521451034749, + "learning_rate": 1.2037763172285737e-07, + "loss": 0.4813, + "step": 21216 + }, + { + "epoch": 0.8755054881571346, + "grad_norm": 2.1655354862591834, + "learning_rate": 1.202989554468456e-07, + "loss": 0.4457, + "step": 21217 + }, + { + "epoch": 0.8755467524964925, + "grad_norm": 4.8808234703304265, + "learning_rate": 1.202203038157461e-07, + "loss": 0.5314, + "step": 21218 + }, + { + "epoch": 0.8755880168358504, + "grad_norm": 8.235004487359564, + "learning_rate": 1.2014167683096328e-07, + "loss": 0.5373, + "step": 21219 + }, + { + "epoch": 0.8756292811752084, + "grad_norm": 3.190881616700569, + "learning_rate": 1.2006307449390196e-07, + "loss": 0.4836, + "step": 21220 + }, + { + "epoch": 0.8756705455145664, + "grad_norm": 4.4369949059789775, + "learning_rate": 1.199844968059664e-07, + "loss": 0.4993, + "step": 21221 + }, + { + "epoch": 0.8757118098539243, + "grad_norm": 3.694649653180732, + "learning_rate": 1.1990594376856013e-07, + "loss": 0.5103, + "step": 21222 + }, + { + "epoch": 0.8757530741932822, + "grad_norm": 3.4092451155391768, + "learning_rate": 1.1982741538308585e-07, + "loss": 0.5033, + "step": 21223 + }, + { + "epoch": 0.8757943385326401, + "grad_norm": 2.402101615627521, + "learning_rate": 1.1974891165094658e-07, + "loss": 0.5244, + "step": 21224 + }, + { + "epoch": 0.875835602871998, + "grad_norm": 2.202863265482977, + "learning_rate": 1.1967043257354493e-07, + "loss": 0.4761, + "step": 21225 + }, + { + "epoch": 0.8758768672113559, + "grad_norm": 4.300745945206791, + "learning_rate": 1.1959197815228223e-07, + "loss": 0.4626, + "step": 21226 + }, + { + "epoch": 0.8759181315507139, + "grad_norm": 2.6974929634661113, + "learning_rate": 1.1951354838856005e-07, + "loss": 0.5337, + "step": 21227 + }, + { + "epoch": 0.8759593958900718, + "grad_norm": 4.200649109743471, + "learning_rate": 1.1943514328377925e-07, + "loss": 0.4904, + "step": 21228 + }, + { + "epoch": 0.8760006602294297, + "grad_norm": 2.170126032254537, + "learning_rate": 
1.193567628393407e-07, + "loss": 0.4884, + "step": 21229 + }, + { + "epoch": 0.8760419245687876, + "grad_norm": 2.946847319038491, + "learning_rate": 1.19278407056644e-07, + "loss": 0.503, + "step": 21230 + }, + { + "epoch": 0.8760831889081456, + "grad_norm": 3.0589038704632734, + "learning_rate": 1.1920007593708915e-07, + "loss": 0.4928, + "step": 21231 + }, + { + "epoch": 0.8761244532475035, + "grad_norm": 4.708083492476579, + "learning_rate": 1.1912176948207487e-07, + "loss": 0.532, + "step": 21232 + }, + { + "epoch": 0.8761657175868615, + "grad_norm": 3.5040085012483257, + "learning_rate": 1.1904348769300039e-07, + "loss": 0.4763, + "step": 21233 + }, + { + "epoch": 0.8762069819262194, + "grad_norm": 4.868071634082166, + "learning_rate": 1.1896523057126407e-07, + "loss": 0.5063, + "step": 21234 + }, + { + "epoch": 0.8762482462655773, + "grad_norm": 3.01097815484835, + "learning_rate": 1.1888699811826331e-07, + "loss": 0.5173, + "step": 21235 + }, + { + "epoch": 0.8762895106049352, + "grad_norm": 15.045874598008654, + "learning_rate": 1.1880879033539566e-07, + "loss": 0.489, + "step": 21236 + }, + { + "epoch": 0.8763307749442931, + "grad_norm": 2.978080365715569, + "learning_rate": 1.1873060722405832e-07, + "loss": 0.5196, + "step": 21237 + }, + { + "epoch": 0.876372039283651, + "grad_norm": 4.620549044602001, + "learning_rate": 1.1865244878564785e-07, + "loss": 0.6071, + "step": 21238 + }, + { + "epoch": 0.876413303623009, + "grad_norm": 3.81972162336934, + "learning_rate": 1.1857431502156013e-07, + "loss": 0.4965, + "step": 21239 + }, + { + "epoch": 0.8764545679623669, + "grad_norm": 6.132669176501229, + "learning_rate": 1.1849620593319105e-07, + "loss": 0.4962, + "step": 21240 + }, + { + "epoch": 0.8764958323017249, + "grad_norm": 5.564920444912389, + "learning_rate": 1.1841812152193548e-07, + "loss": 0.5166, + "step": 21241 + }, + { + "epoch": 0.8765370966410828, + "grad_norm": 4.680377195000809, + "learning_rate": 1.1834006178918882e-07, + "loss": 0.5568, + "step": 21242 + }, + { + "epoch": 0.8765783609804407, + "grad_norm": 7.352069955936874, + "learning_rate": 1.1826202673634479e-07, + "loss": 0.5022, + "step": 21243 + }, + { + "epoch": 0.8766196253197986, + "grad_norm": 6.453880550035723, + "learning_rate": 1.1818401636479775e-07, + "loss": 0.4559, + "step": 21244 + }, + { + "epoch": 0.8766608896591566, + "grad_norm": 3.15234397756344, + "learning_rate": 1.1810603067594078e-07, + "loss": 0.5073, + "step": 21245 + }, + { + "epoch": 0.8767021539985145, + "grad_norm": 3.2186636567640794, + "learning_rate": 1.1802806967116724e-07, + "loss": 0.5387, + "step": 21246 + }, + { + "epoch": 0.8767434183378724, + "grad_norm": 22.981563079570613, + "learning_rate": 1.1795013335186938e-07, + "loss": 0.6067, + "step": 21247 + }, + { + "epoch": 0.8767846826772303, + "grad_norm": 3.819743481404894, + "learning_rate": 1.178722217194394e-07, + "loss": 0.501, + "step": 21248 + }, + { + "epoch": 0.8768259470165882, + "grad_norm": 3.8331477643387117, + "learning_rate": 1.1779433477526919e-07, + "loss": 0.5102, + "step": 21249 + }, + { + "epoch": 0.8768672113559461, + "grad_norm": 2.306902050986424, + "learning_rate": 1.1771647252075012e-07, + "loss": 0.517, + "step": 21250 + }, + { + "epoch": 0.8769084756953042, + "grad_norm": 2.6258515985900712, + "learning_rate": 1.1763863495727261e-07, + "loss": 0.4802, + "step": 21251 + }, + { + "epoch": 0.8769497400346621, + "grad_norm": 5.651509927750198, + "learning_rate": 1.1756082208622703e-07, + "loss": 0.49, + "step": 21252 + }, + { + "epoch": 
0.87699100437402, + "grad_norm": 3.2219837937909106, + "learning_rate": 1.174830339090036e-07, + "loss": 0.4528, + "step": 21253 + }, + { + "epoch": 0.8770322687133779, + "grad_norm": 2.647278002543525, + "learning_rate": 1.1740527042699206e-07, + "loss": 0.4971, + "step": 21254 + }, + { + "epoch": 0.8770735330527358, + "grad_norm": 6.857875019074116, + "learning_rate": 1.1732753164158077e-07, + "loss": 0.5007, + "step": 21255 + }, + { + "epoch": 0.8771147973920937, + "grad_norm": 9.51320244568043, + "learning_rate": 1.1724981755415865e-07, + "loss": 0.5203, + "step": 21256 + }, + { + "epoch": 0.8771560617314517, + "grad_norm": 2.573703507066547, + "learning_rate": 1.1717212816611423e-07, + "loss": 0.5813, + "step": 21257 + }, + { + "epoch": 0.8771973260708096, + "grad_norm": 29.76581708861096, + "learning_rate": 1.1709446347883457e-07, + "loss": 0.5795, + "step": 21258 + }, + { + "epoch": 0.8772385904101675, + "grad_norm": 2.7716742155181855, + "learning_rate": 1.1701682349370774e-07, + "loss": 0.5428, + "step": 21259 + }, + { + "epoch": 0.8772798547495254, + "grad_norm": 2.984786227618525, + "learning_rate": 1.1693920821211961e-07, + "loss": 0.4495, + "step": 21260 + }, + { + "epoch": 0.8773211190888834, + "grad_norm": 3.3845038835668055, + "learning_rate": 1.1686161763545728e-07, + "loss": 0.6017, + "step": 21261 + }, + { + "epoch": 0.8773623834282414, + "grad_norm": 1.9294320588720515, + "learning_rate": 1.1678405176510676e-07, + "loss": 0.4795, + "step": 21262 + }, + { + "epoch": 0.8774036477675993, + "grad_norm": 4.70784254997762, + "learning_rate": 1.1670651060245313e-07, + "loss": 0.4895, + "step": 21263 + }, + { + "epoch": 0.8774449121069572, + "grad_norm": 2.667893809669271, + "learning_rate": 1.1662899414888161e-07, + "loss": 0.5297, + "step": 21264 + }, + { + "epoch": 0.8774861764463151, + "grad_norm": 2.0341317621860218, + "learning_rate": 1.1655150240577695e-07, + "loss": 0.4573, + "step": 21265 + }, + { + "epoch": 0.877527440785673, + "grad_norm": 8.981928423639811, + "learning_rate": 1.164740353745235e-07, + "loss": 0.4663, + "step": 21266 + }, + { + "epoch": 0.8775687051250309, + "grad_norm": 4.603985743851837, + "learning_rate": 1.1639659305650468e-07, + "loss": 0.5495, + "step": 21267 + }, + { + "epoch": 0.8776099694643889, + "grad_norm": 3.418753618077685, + "learning_rate": 1.1631917545310389e-07, + "loss": 0.4776, + "step": 21268 + }, + { + "epoch": 0.8776512338037468, + "grad_norm": 3.355261378771576, + "learning_rate": 1.1624178256570417e-07, + "loss": 0.5175, + "step": 21269 + }, + { + "epoch": 0.8776924981431047, + "grad_norm": 3.2634073039911744, + "learning_rate": 1.1616441439568809e-07, + "loss": 0.4926, + "step": 21270 + }, + { + "epoch": 0.8777337624824627, + "grad_norm": 3.005445737338441, + "learning_rate": 1.1608707094443721e-07, + "loss": 0.4885, + "step": 21271 + }, + { + "epoch": 0.8777750268218206, + "grad_norm": 3.244938443549802, + "learning_rate": 1.1600975221333326e-07, + "loss": 0.4974, + "step": 21272 + }, + { + "epoch": 0.8778162911611785, + "grad_norm": 9.67115015001733, + "learning_rate": 1.1593245820375714e-07, + "loss": 0.5191, + "step": 21273 + }, + { + "epoch": 0.8778575555005365, + "grad_norm": 4.424160430869882, + "learning_rate": 1.158551889170899e-07, + "loss": 0.4933, + "step": 21274 + }, + { + "epoch": 0.8778988198398944, + "grad_norm": 9.481101590306704, + "learning_rate": 1.1577794435471162e-07, + "loss": 0.5578, + "step": 21275 + }, + { + "epoch": 0.8779400841792523, + "grad_norm": 6.981897152961824, + "learning_rate": 
1.1570072451800201e-07, + "loss": 0.5399, + "step": 21276 + }, + { + "epoch": 0.8779813485186102, + "grad_norm": 2.1857750936307894, + "learning_rate": 1.156235294083403e-07, + "loss": 0.4637, + "step": 21277 + }, + { + "epoch": 0.8780226128579681, + "grad_norm": 3.912605321794412, + "learning_rate": 1.1554635902710558e-07, + "loss": 0.5288, + "step": 21278 + }, + { + "epoch": 0.878063877197326, + "grad_norm": 3.0869886552976658, + "learning_rate": 1.1546921337567656e-07, + "loss": 0.5086, + "step": 21279 + }, + { + "epoch": 0.878105141536684, + "grad_norm": 4.031003100658746, + "learning_rate": 1.1539209245543064e-07, + "loss": 0.4998, + "step": 21280 + }, + { + "epoch": 0.878146405876042, + "grad_norm": 9.85883457495704, + "learning_rate": 1.1531499626774572e-07, + "loss": 0.4459, + "step": 21281 + }, + { + "epoch": 0.8781876702153999, + "grad_norm": 3.2542100893945975, + "learning_rate": 1.1523792481399902e-07, + "loss": 0.4814, + "step": 21282 + }, + { + "epoch": 0.8782289345547578, + "grad_norm": 5.078208347046421, + "learning_rate": 1.1516087809556747e-07, + "loss": 0.4451, + "step": 21283 + }, + { + "epoch": 0.8782701988941157, + "grad_norm": 3.713124023417499, + "learning_rate": 1.150838561138266e-07, + "loss": 0.5551, + "step": 21284 + }, + { + "epoch": 0.8783114632334736, + "grad_norm": 2.8379438961311823, + "learning_rate": 1.1500685887015249e-07, + "loss": 0.4652, + "step": 21285 + }, + { + "epoch": 0.8783527275728316, + "grad_norm": 2.4414493560081425, + "learning_rate": 1.1492988636592072e-07, + "loss": 0.5311, + "step": 21286 + }, + { + "epoch": 0.8783939919121895, + "grad_norm": 4.3446836012702175, + "learning_rate": 1.1485293860250617e-07, + "loss": 0.5593, + "step": 21287 + }, + { + "epoch": 0.8784352562515474, + "grad_norm": 2.5676301175052467, + "learning_rate": 1.1477601558128309e-07, + "loss": 0.5169, + "step": 21288 + }, + { + "epoch": 0.8784765205909053, + "grad_norm": 4.37260786352112, + "learning_rate": 1.146991173036257e-07, + "loss": 0.4993, + "step": 21289 + }, + { + "epoch": 0.8785177849302632, + "grad_norm": 5.181212721897331, + "learning_rate": 1.1462224377090758e-07, + "loss": 0.5024, + "step": 21290 + }, + { + "epoch": 0.8785590492696211, + "grad_norm": 2.0455604202445676, + "learning_rate": 1.1454539498450212e-07, + "loss": 0.4546, + "step": 21291 + }, + { + "epoch": 0.8786003136089792, + "grad_norm": 3.131212750060536, + "learning_rate": 1.1446857094578139e-07, + "loss": 0.4829, + "step": 21292 + }, + { + "epoch": 0.8786415779483371, + "grad_norm": 3.482830082437211, + "learning_rate": 1.1439177165611831e-07, + "loss": 0.4536, + "step": 21293 + }, + { + "epoch": 0.878682842287695, + "grad_norm": 2.165891018815232, + "learning_rate": 1.1431499711688425e-07, + "loss": 0.4498, + "step": 21294 + }, + { + "epoch": 0.8787241066270529, + "grad_norm": 2.1897043219112513, + "learning_rate": 1.1423824732945098e-07, + "loss": 0.527, + "step": 21295 + }, + { + "epoch": 0.8787653709664108, + "grad_norm": 5.946046598302483, + "learning_rate": 1.141615222951894e-07, + "loss": 0.5169, + "step": 21296 + }, + { + "epoch": 0.8788066353057687, + "grad_norm": 2.97350562333369, + "learning_rate": 1.1408482201546954e-07, + "loss": 0.4403, + "step": 21297 + }, + { + "epoch": 0.8788478996451267, + "grad_norm": 7.338328337129636, + "learning_rate": 1.1400814649166186e-07, + "loss": 0.5168, + "step": 21298 + }, + { + "epoch": 0.8788891639844846, + "grad_norm": 2.06366286683033, + "learning_rate": 1.1393149572513589e-07, + "loss": 0.4865, + "step": 21299 + }, + { + "epoch": 
0.8789304283238425, + "grad_norm": 1.8696994949577943, + "learning_rate": 1.1385486971726105e-07, + "loss": 0.4744, + "step": 21300 + }, + { + "epoch": 0.8789716926632004, + "grad_norm": 2.6072195972926293, + "learning_rate": 1.1377826846940559e-07, + "loss": 0.4811, + "step": 21301 + }, + { + "epoch": 0.8790129570025584, + "grad_norm": 4.431213296346959, + "learning_rate": 1.1370169198293806e-07, + "loss": 0.5276, + "step": 21302 + }, + { + "epoch": 0.8790542213419164, + "grad_norm": 2.7903706306182516, + "learning_rate": 1.1362514025922655e-07, + "loss": 0.506, + "step": 21303 + }, + { + "epoch": 0.8790954856812743, + "grad_norm": 2.6457103830331676, + "learning_rate": 1.1354861329963795e-07, + "loss": 0.4902, + "step": 21304 + }, + { + "epoch": 0.8791367500206322, + "grad_norm": 6.527915599615088, + "learning_rate": 1.1347211110553952e-07, + "loss": 0.4889, + "step": 21305 + }, + { + "epoch": 0.8791780143599901, + "grad_norm": 2.7552228451082117, + "learning_rate": 1.133956336782978e-07, + "loss": 0.4752, + "step": 21306 + }, + { + "epoch": 0.879219278699348, + "grad_norm": 2.735987700709808, + "learning_rate": 1.1331918101927891e-07, + "loss": 0.5061, + "step": 21307 + }, + { + "epoch": 0.8792605430387059, + "grad_norm": 2.8954870016711154, + "learning_rate": 1.1324275312984823e-07, + "loss": 0.4676, + "step": 21308 + }, + { + "epoch": 0.8793018073780638, + "grad_norm": 2.19178692718424, + "learning_rate": 1.1316635001137132e-07, + "loss": 0.5092, + "step": 21309 + }, + { + "epoch": 0.8793430717174218, + "grad_norm": 2.677126143392099, + "learning_rate": 1.1308997166521262e-07, + "loss": 0.5438, + "step": 21310 + }, + { + "epoch": 0.8793843360567797, + "grad_norm": 3.574562914506971, + "learning_rate": 1.1301361809273636e-07, + "loss": 0.5176, + "step": 21311 + }, + { + "epoch": 0.8794256003961377, + "grad_norm": 3.048975536483788, + "learning_rate": 1.1293728929530695e-07, + "loss": 0.6002, + "step": 21312 + }, + { + "epoch": 0.8794668647354956, + "grad_norm": 3.7353639566231873, + "learning_rate": 1.1286098527428711e-07, + "loss": 0.4723, + "step": 21313 + }, + { + "epoch": 0.8795081290748535, + "grad_norm": 2.87383623725815, + "learning_rate": 1.127847060310403e-07, + "loss": 0.4969, + "step": 21314 + }, + { + "epoch": 0.8795493934142115, + "grad_norm": 4.586207179716019, + "learning_rate": 1.1270845156692888e-07, + "loss": 0.4869, + "step": 21315 + }, + { + "epoch": 0.8795906577535694, + "grad_norm": 3.807557764733861, + "learning_rate": 1.1263222188331512e-07, + "loss": 0.4675, + "step": 21316 + }, + { + "epoch": 0.8796319220929273, + "grad_norm": 3.213103277300832, + "learning_rate": 1.1255601698156042e-07, + "loss": 0.5586, + "step": 21317 + }, + { + "epoch": 0.8796731864322852, + "grad_norm": 3.7200578305446936, + "learning_rate": 1.1247983686302604e-07, + "loss": 0.4935, + "step": 21318 + }, + { + "epoch": 0.8797144507716431, + "grad_norm": 2.152441251248504, + "learning_rate": 1.1240368152907287e-07, + "loss": 0.535, + "step": 21319 + }, + { + "epoch": 0.879755715111001, + "grad_norm": 3.992237133299339, + "learning_rate": 1.1232755098106135e-07, + "loss": 0.5346, + "step": 21320 + }, + { + "epoch": 0.879796979450359, + "grad_norm": 2.997992444809587, + "learning_rate": 1.1225144522035102e-07, + "loss": 0.5425, + "step": 21321 + }, + { + "epoch": 0.879838243789717, + "grad_norm": 3.5459264978223946, + "learning_rate": 1.121753642483015e-07, + "loss": 0.5172, + "step": 21322 + }, + { + "epoch": 0.8798795081290749, + "grad_norm": 3.1134125893249207, + "learning_rate": 
1.12099308066272e-07, + "loss": 0.4762, + "step": 21323 + }, + { + "epoch": 0.8799207724684328, + "grad_norm": 3.716653364986428, + "learning_rate": 1.1202327667562079e-07, + "loss": 0.4876, + "step": 21324 + }, + { + "epoch": 0.8799620368077907, + "grad_norm": 2.1745407311041083, + "learning_rate": 1.1194727007770577e-07, + "loss": 0.5325, + "step": 21325 + }, + { + "epoch": 0.8800033011471486, + "grad_norm": 8.347956598209977, + "learning_rate": 1.1187128827388504e-07, + "loss": 0.5439, + "step": 21326 + }, + { + "epoch": 0.8800445654865066, + "grad_norm": 2.680976022405435, + "learning_rate": 1.117953312655155e-07, + "loss": 0.4636, + "step": 21327 + }, + { + "epoch": 0.8800858298258645, + "grad_norm": 2.5754116566275655, + "learning_rate": 1.1171939905395456e-07, + "loss": 0.555, + "step": 21328 + }, + { + "epoch": 0.8801270941652224, + "grad_norm": 3.0847693128069813, + "learning_rate": 1.1164349164055765e-07, + "loss": 0.5163, + "step": 21329 + }, + { + "epoch": 0.8801683585045803, + "grad_norm": 17.225186807447574, + "learning_rate": 1.1156760902668118e-07, + "loss": 0.5046, + "step": 21330 + }, + { + "epoch": 0.8802096228439382, + "grad_norm": 2.3217768883239045, + "learning_rate": 1.1149175121368055e-07, + "loss": 0.5412, + "step": 21331 + }, + { + "epoch": 0.8802508871832962, + "grad_norm": 22.309514890567716, + "learning_rate": 1.1141591820291102e-07, + "loss": 0.5535, + "step": 21332 + }, + { + "epoch": 0.8802921515226542, + "grad_norm": 4.131691503199726, + "learning_rate": 1.113401099957267e-07, + "loss": 0.5445, + "step": 21333 + }, + { + "epoch": 0.8803334158620121, + "grad_norm": 7.642285786292191, + "learning_rate": 1.1126432659348179e-07, + "loss": 0.5093, + "step": 21334 + }, + { + "epoch": 0.88037468020137, + "grad_norm": 6.079858058267154, + "learning_rate": 1.1118856799753025e-07, + "loss": 0.566, + "step": 21335 + }, + { + "epoch": 0.8804159445407279, + "grad_norm": 3.086234687589534, + "learning_rate": 1.111128342092253e-07, + "loss": 0.4932, + "step": 21336 + }, + { + "epoch": 0.8804572088800858, + "grad_norm": 11.77379752791069, + "learning_rate": 1.1103712522991954e-07, + "loss": 0.5004, + "step": 21337 + }, + { + "epoch": 0.8804984732194437, + "grad_norm": 9.436990669915424, + "learning_rate": 1.1096144106096523e-07, + "loss": 0.5587, + "step": 21338 + }, + { + "epoch": 0.8805397375588017, + "grad_norm": 2.391268886465787, + "learning_rate": 1.1088578170371443e-07, + "loss": 0.497, + "step": 21339 + }, + { + "epoch": 0.8805810018981596, + "grad_norm": 5.575115576208202, + "learning_rate": 1.1081014715951877e-07, + "loss": 0.5206, + "step": 21340 + }, + { + "epoch": 0.8806222662375175, + "grad_norm": 3.4796218515286883, + "learning_rate": 1.1073453742972894e-07, + "loss": 0.4754, + "step": 21341 + }, + { + "epoch": 0.8806635305768755, + "grad_norm": 4.239826128021966, + "learning_rate": 1.1065895251569557e-07, + "loss": 0.5596, + "step": 21342 + }, + { + "epoch": 0.8807047949162334, + "grad_norm": 5.93581512265984, + "learning_rate": 1.1058339241876892e-07, + "loss": 0.5346, + "step": 21343 + }, + { + "epoch": 0.8807460592555914, + "grad_norm": 10.269325677586883, + "learning_rate": 1.1050785714029904e-07, + "loss": 0.5032, + "step": 21344 + }, + { + "epoch": 0.8807873235949493, + "grad_norm": 9.624108566928248, + "learning_rate": 1.1043234668163455e-07, + "loss": 0.4718, + "step": 21345 + }, + { + "epoch": 0.8808285879343072, + "grad_norm": 3.7136450813134383, + "learning_rate": 1.1035686104412434e-07, + "loss": 0.5321, + "step": 21346 + }, + { + "epoch": 
0.8808698522736651, + "grad_norm": 2.0367681697983833, + "learning_rate": 1.1028140022911704e-07, + "loss": 0.4985, + "step": 21347 + }, + { + "epoch": 0.880911116613023, + "grad_norm": 2.421495719927346, + "learning_rate": 1.102059642379607e-07, + "loss": 0.5338, + "step": 21348 + }, + { + "epoch": 0.8809523809523809, + "grad_norm": 3.986138837135948, + "learning_rate": 1.1013055307200277e-07, + "loss": 0.4864, + "step": 21349 + }, + { + "epoch": 0.8809936452917388, + "grad_norm": 24.424202240369905, + "learning_rate": 1.1005516673258965e-07, + "loss": 0.569, + "step": 21350 + }, + { + "epoch": 0.8810349096310968, + "grad_norm": 3.035611868126115, + "learning_rate": 1.0997980522106826e-07, + "loss": 0.4859, + "step": 21351 + }, + { + "epoch": 0.8810761739704547, + "grad_norm": 2.7262234254878157, + "learning_rate": 1.0990446853878505e-07, + "loss": 0.5031, + "step": 21352 + }, + { + "epoch": 0.8811174383098127, + "grad_norm": 2.8804903485064375, + "learning_rate": 1.0982915668708576e-07, + "loss": 0.4879, + "step": 21353 + }, + { + "epoch": 0.8811587026491706, + "grad_norm": 4.445699909851198, + "learning_rate": 1.0975386966731498e-07, + "loss": 0.5134, + "step": 21354 + }, + { + "epoch": 0.8811999669885285, + "grad_norm": 8.425676118244645, + "learning_rate": 1.0967860748081815e-07, + "loss": 0.504, + "step": 21355 + }, + { + "epoch": 0.8812412313278865, + "grad_norm": 3.436852687895743, + "learning_rate": 1.0960337012893934e-07, + "loss": 0.5143, + "step": 21356 + }, + { + "epoch": 0.8812824956672444, + "grad_norm": 2.159300190662792, + "learning_rate": 1.0952815761302281e-07, + "loss": 0.5358, + "step": 21357 + }, + { + "epoch": 0.8813237600066023, + "grad_norm": 2.8245317993182653, + "learning_rate": 1.094529699344115e-07, + "loss": 0.5366, + "step": 21358 + }, + { + "epoch": 0.8813650243459602, + "grad_norm": 2.257686232436718, + "learning_rate": 1.09377807094449e-07, + "loss": 0.5064, + "step": 21359 + }, + { + "epoch": 0.8814062886853181, + "grad_norm": 2.8917736205501496, + "learning_rate": 1.0930266909447739e-07, + "loss": 0.508, + "step": 21360 + }, + { + "epoch": 0.881447553024676, + "grad_norm": 3.782247609246126, + "learning_rate": 1.092275559358396e-07, + "loss": 0.5508, + "step": 21361 + }, + { + "epoch": 0.881488817364034, + "grad_norm": 2.30112390967977, + "learning_rate": 1.0915246761987673e-07, + "loss": 0.4821, + "step": 21362 + }, + { + "epoch": 0.881530081703392, + "grad_norm": 4.2988393506993186, + "learning_rate": 1.0907740414792988e-07, + "loss": 0.459, + "step": 21363 + }, + { + "epoch": 0.8815713460427499, + "grad_norm": 2.8519718252538535, + "learning_rate": 1.0900236552134014e-07, + "loss": 0.4817, + "step": 21364 + }, + { + "epoch": 0.8816126103821078, + "grad_norm": 4.229238757262219, + "learning_rate": 1.0892735174144825e-07, + "loss": 0.4897, + "step": 21365 + }, + { + "epoch": 0.8816538747214657, + "grad_norm": 4.513551761849179, + "learning_rate": 1.0885236280959349e-07, + "loss": 0.4783, + "step": 21366 + }, + { + "epoch": 0.8816951390608236, + "grad_norm": 2.5780383512427845, + "learning_rate": 1.0877739872711562e-07, + "loss": 0.524, + "step": 21367 + }, + { + "epoch": 0.8817364034001816, + "grad_norm": 2.689428902036793, + "learning_rate": 1.0870245949535356e-07, + "loss": 0.5709, + "step": 21368 + }, + { + "epoch": 0.8817776677395395, + "grad_norm": 26.68975841101571, + "learning_rate": 1.086275451156466e-07, + "loss": 0.5166, + "step": 21369 + }, + { + "epoch": 0.8818189320788974, + "grad_norm": 6.440062716292033, + "learning_rate": 
1.0855265558933197e-07, + "loss": 0.4732, + "step": 21370 + }, + { + "epoch": 0.8818601964182553, + "grad_norm": 2.6427774447406462, + "learning_rate": 1.0847779091774779e-07, + "loss": 0.5172, + "step": 21371 + }, + { + "epoch": 0.8819014607576132, + "grad_norm": 4.3702434267065895, + "learning_rate": 1.0840295110223131e-07, + "loss": 0.5315, + "step": 21372 + }, + { + "epoch": 0.8819427250969712, + "grad_norm": 5.019686617846552, + "learning_rate": 1.0832813614411963e-07, + "loss": 0.4959, + "step": 21373 + }, + { + "epoch": 0.8819839894363292, + "grad_norm": 3.674746338974242, + "learning_rate": 1.0825334604474868e-07, + "loss": 0.5121, + "step": 21374 + }, + { + "epoch": 0.8820252537756871, + "grad_norm": 2.8583996416422335, + "learning_rate": 1.0817858080545472e-07, + "loss": 0.5226, + "step": 21375 + }, + { + "epoch": 0.882066518115045, + "grad_norm": 2.538388151268894, + "learning_rate": 1.0810384042757288e-07, + "loss": 0.5561, + "step": 21376 + }, + { + "epoch": 0.8821077824544029, + "grad_norm": 2.8366915350560595, + "learning_rate": 1.0802912491243855e-07, + "loss": 0.4319, + "step": 21377 + }, + { + "epoch": 0.8821490467937608, + "grad_norm": 2.681207007095374, + "learning_rate": 1.0795443426138635e-07, + "loss": 0.5671, + "step": 21378 + }, + { + "epoch": 0.8821903111331187, + "grad_norm": 9.261889898803997, + "learning_rate": 1.078797684757502e-07, + "loss": 0.5064, + "step": 21379 + }, + { + "epoch": 0.8822315754724767, + "grad_norm": 2.815877025769544, + "learning_rate": 1.078051275568639e-07, + "loss": 0.4863, + "step": 21380 + }, + { + "epoch": 0.8822728398118346, + "grad_norm": 2.368057099416814, + "learning_rate": 1.0773051150606084e-07, + "loss": 0.4609, + "step": 21381 + }, + { + "epoch": 0.8823141041511925, + "grad_norm": 5.202548275838747, + "learning_rate": 1.0765592032467364e-07, + "loss": 0.5173, + "step": 21382 + }, + { + "epoch": 0.8823553684905505, + "grad_norm": 5.135837486951442, + "learning_rate": 1.075813540140349e-07, + "loss": 0.5828, + "step": 21383 + }, + { + "epoch": 0.8823966328299084, + "grad_norm": 2.9016191629554196, + "learning_rate": 1.075068125754764e-07, + "loss": 0.453, + "step": 21384 + }, + { + "epoch": 0.8824378971692663, + "grad_norm": 5.7760673057322025, + "learning_rate": 1.0743229601032989e-07, + "loss": 0.5024, + "step": 21385 + }, + { + "epoch": 0.8824791615086243, + "grad_norm": 2.3271287885230434, + "learning_rate": 1.0735780431992615e-07, + "loss": 0.5162, + "step": 21386 + }, + { + "epoch": 0.8825204258479822, + "grad_norm": 1.964450829493114, + "learning_rate": 1.0728333750559577e-07, + "loss": 0.4727, + "step": 21387 + }, + { + "epoch": 0.8825616901873401, + "grad_norm": 4.441117290051605, + "learning_rate": 1.0720889556866937e-07, + "loss": 0.5258, + "step": 21388 + }, + { + "epoch": 0.882602954526698, + "grad_norm": 2.919034852176001, + "learning_rate": 1.0713447851047602e-07, + "loss": 0.4851, + "step": 21389 + }, + { + "epoch": 0.8826442188660559, + "grad_norm": 4.895609966947648, + "learning_rate": 1.0706008633234554e-07, + "loss": 0.4608, + "step": 21390 + }, + { + "epoch": 0.8826854832054138, + "grad_norm": 5.9070342686313495, + "learning_rate": 1.0698571903560617e-07, + "loss": 0.4986, + "step": 21391 + }, + { + "epoch": 0.8827267475447718, + "grad_norm": 2.3903198133377903, + "learning_rate": 1.0691137662158684e-07, + "loss": 0.5198, + "step": 21392 + }, + { + "epoch": 0.8827680118841298, + "grad_norm": 4.000906689074687, + "learning_rate": 1.0683705909161517e-07, + "loss": 0.5396, + "step": 21393 + }, + { + "epoch": 
0.8828092762234877, + "grad_norm": 2.6182986108732123, + "learning_rate": 1.067627664470191e-07, + "loss": 0.4978, + "step": 21394 + }, + { + "epoch": 0.8828505405628456, + "grad_norm": 3.6031459714715366, + "learning_rate": 1.0668849868912506e-07, + "loss": 0.511, + "step": 21395 + }, + { + "epoch": 0.8828918049022035, + "grad_norm": 3.694720874228853, + "learning_rate": 1.0661425581925998e-07, + "loss": 0.4996, + "step": 21396 + }, + { + "epoch": 0.8829330692415615, + "grad_norm": 2.804365949151788, + "learning_rate": 1.0654003783875e-07, + "loss": 0.5078, + "step": 21397 + }, + { + "epoch": 0.8829743335809194, + "grad_norm": 2.199395883245368, + "learning_rate": 1.0646584474892101e-07, + "loss": 0.5059, + "step": 21398 + }, + { + "epoch": 0.8830155979202773, + "grad_norm": 3.9274538893050783, + "learning_rate": 1.06391676551098e-07, + "loss": 0.4969, + "step": 21399 + }, + { + "epoch": 0.8830568622596352, + "grad_norm": 3.0396020376047637, + "learning_rate": 1.0631753324660603e-07, + "loss": 0.486, + "step": 21400 + }, + { + "epoch": 0.8830981265989931, + "grad_norm": 2.630859349244932, + "learning_rate": 1.0624341483676941e-07, + "loss": 0.502, + "step": 21401 + }, + { + "epoch": 0.883139390938351, + "grad_norm": 3.3521518569915747, + "learning_rate": 1.0616932132291207e-07, + "loss": 0.5035, + "step": 21402 + }, + { + "epoch": 0.8831806552777091, + "grad_norm": 2.7097077098361337, + "learning_rate": 1.0609525270635728e-07, + "loss": 0.5757, + "step": 21403 + }, + { + "epoch": 0.883221919617067, + "grad_norm": 5.293839915237002, + "learning_rate": 1.0602120898842832e-07, + "loss": 0.5215, + "step": 21404 + }, + { + "epoch": 0.8832631839564249, + "grad_norm": 6.034787037879833, + "learning_rate": 1.059471901704478e-07, + "loss": 0.5304, + "step": 21405 + }, + { + "epoch": 0.8833044482957828, + "grad_norm": 2.414451575779205, + "learning_rate": 1.0587319625373798e-07, + "loss": 0.5053, + "step": 21406 + }, + { + "epoch": 0.8833457126351407, + "grad_norm": 4.924813017463339, + "learning_rate": 1.0579922723962032e-07, + "loss": 0.5456, + "step": 21407 + }, + { + "epoch": 0.8833869769744986, + "grad_norm": 3.7808361841007923, + "learning_rate": 1.0572528312941627e-07, + "loss": 0.4903, + "step": 21408 + }, + { + "epoch": 0.8834282413138566, + "grad_norm": 3.1573179908245947, + "learning_rate": 1.056513639244464e-07, + "loss": 0.5125, + "step": 21409 + }, + { + "epoch": 0.8834695056532145, + "grad_norm": 1.8210875907282411, + "learning_rate": 1.055774696260317e-07, + "loss": 0.4534, + "step": 21410 + }, + { + "epoch": 0.8835107699925724, + "grad_norm": 4.418195231932573, + "learning_rate": 1.0550360023549143e-07, + "loss": 0.5198, + "step": 21411 + }, + { + "epoch": 0.8835520343319303, + "grad_norm": 2.4198797685784146, + "learning_rate": 1.054297557541452e-07, + "loss": 0.4996, + "step": 21412 + }, + { + "epoch": 0.8835932986712882, + "grad_norm": 7.038883675642612, + "learning_rate": 1.0535593618331246e-07, + "loss": 0.4942, + "step": 21413 + }, + { + "epoch": 0.8836345630106462, + "grad_norm": 5.904932223598816, + "learning_rate": 1.0528214152431165e-07, + "loss": 0.5428, + "step": 21414 + }, + { + "epoch": 0.8836758273500042, + "grad_norm": 5.5361859501352875, + "learning_rate": 1.0520837177846087e-07, + "loss": 0.5093, + "step": 21415 + }, + { + "epoch": 0.8837170916893621, + "grad_norm": 3.2462701329365187, + "learning_rate": 1.051346269470776e-07, + "loss": 0.5496, + "step": 21416 + }, + { + "epoch": 0.88375835602872, + "grad_norm": 1.968364024641761, + "learning_rate": 
1.0506090703147925e-07, + "loss": 0.5173, + "step": 21417 + }, + { + "epoch": 0.8837996203680779, + "grad_norm": 3.8507877721552544, + "learning_rate": 1.0498721203298295e-07, + "loss": 0.5171, + "step": 21418 + }, + { + "epoch": 0.8838408847074358, + "grad_norm": 2.561177642346401, + "learning_rate": 1.0491354195290465e-07, + "loss": 0.5025, + "step": 21419 + }, + { + "epoch": 0.8838821490467937, + "grad_norm": 1.860149998520723, + "learning_rate": 1.0483989679256045e-07, + "loss": 0.4956, + "step": 21420 + }, + { + "epoch": 0.8839234133861517, + "grad_norm": 9.20036529214207, + "learning_rate": 1.0476627655326582e-07, + "loss": 0.5173, + "step": 21421 + }, + { + "epoch": 0.8839646777255096, + "grad_norm": 2.09191345723334, + "learning_rate": 1.0469268123633601e-07, + "loss": 0.4728, + "step": 21422 + }, + { + "epoch": 0.8840059420648675, + "grad_norm": 2.507286732829026, + "learning_rate": 1.0461911084308534e-07, + "loss": 0.4803, + "step": 21423 + }, + { + "epoch": 0.8840472064042255, + "grad_norm": 2.8823855782971317, + "learning_rate": 1.0454556537482791e-07, + "loss": 0.5358, + "step": 21424 + }, + { + "epoch": 0.8840884707435834, + "grad_norm": 2.9628385780743964, + "learning_rate": 1.0447204483287765e-07, + "loss": 0.536, + "step": 21425 + }, + { + "epoch": 0.8841297350829413, + "grad_norm": 3.2069997358796227, + "learning_rate": 1.0439854921854803e-07, + "loss": 0.5818, + "step": 21426 + }, + { + "epoch": 0.8841709994222993, + "grad_norm": 4.709684013344933, + "learning_rate": 1.0432507853315132e-07, + "loss": 0.5546, + "step": 21427 + }, + { + "epoch": 0.8842122637616572, + "grad_norm": 3.4584877813220323, + "learning_rate": 1.0425163277800032e-07, + "loss": 0.5321, + "step": 21428 + }, + { + "epoch": 0.8842535281010151, + "grad_norm": 2.8718583321497015, + "learning_rate": 1.0417821195440663e-07, + "loss": 0.5266, + "step": 21429 + }, + { + "epoch": 0.884294792440373, + "grad_norm": 2.452109753415225, + "learning_rate": 1.0410481606368172e-07, + "loss": 0.4927, + "step": 21430 + }, + { + "epoch": 0.8843360567797309, + "grad_norm": 2.5822355052523536, + "learning_rate": 1.0403144510713719e-07, + "loss": 0.5382, + "step": 21431 + }, + { + "epoch": 0.8843773211190888, + "grad_norm": 7.570606842901964, + "learning_rate": 1.03958099086083e-07, + "loss": 0.4778, + "step": 21432 + }, + { + "epoch": 0.8844185854584468, + "grad_norm": 4.609886950760238, + "learning_rate": 1.0388477800182944e-07, + "loss": 0.5621, + "step": 21433 + }, + { + "epoch": 0.8844598497978048, + "grad_norm": 3.383755404667063, + "learning_rate": 1.0381148185568629e-07, + "loss": 0.5586, + "step": 21434 + }, + { + "epoch": 0.8845011141371627, + "grad_norm": 7.9077275100967235, + "learning_rate": 1.0373821064896317e-07, + "loss": 0.5444, + "step": 21435 + }, + { + "epoch": 0.8845423784765206, + "grad_norm": 7.7196804368195995, + "learning_rate": 1.0366496438296818e-07, + "loss": 0.5, + "step": 21436 + }, + { + "epoch": 0.8845836428158785, + "grad_norm": 11.97671100030696, + "learning_rate": 1.0359174305900998e-07, + "loss": 0.4965, + "step": 21437 + }, + { + "epoch": 0.8846249071552365, + "grad_norm": 23.62902446781967, + "learning_rate": 1.0351854667839666e-07, + "loss": 0.4699, + "step": 21438 + }, + { + "epoch": 0.8846661714945944, + "grad_norm": 2.1316437449188363, + "learning_rate": 1.0344537524243569e-07, + "loss": 0.4876, + "step": 21439 + }, + { + "epoch": 0.8847074358339523, + "grad_norm": 2.8562942430291027, + "learning_rate": 1.0337222875243369e-07, + "loss": 0.4828, + "step": 21440 + }, + { + "epoch": 
0.8847487001733102, + "grad_norm": 2.23856019129936, + "learning_rate": 1.0329910720969793e-07, + "loss": 0.4922, + "step": 21441 + }, + { + "epoch": 0.8847899645126681, + "grad_norm": 4.419507206588557, + "learning_rate": 1.0322601061553372e-07, + "loss": 0.458, + "step": 21442 + }, + { + "epoch": 0.884831228852026, + "grad_norm": 2.5883268067279253, + "learning_rate": 1.0315293897124751e-07, + "loss": 0.5205, + "step": 21443 + }, + { + "epoch": 0.8848724931913841, + "grad_norm": 2.0516369338345837, + "learning_rate": 1.0307989227814374e-07, + "loss": 0.5017, + "step": 21444 + }, + { + "epoch": 0.884913757530742, + "grad_norm": 4.196469791925347, + "learning_rate": 1.0300687053752788e-07, + "loss": 0.4992, + "step": 21445 + }, + { + "epoch": 0.8849550218700999, + "grad_norm": 3.7941960675139597, + "learning_rate": 1.029338737507039e-07, + "loss": 0.5179, + "step": 21446 + }, + { + "epoch": 0.8849962862094578, + "grad_norm": 4.883289129491339, + "learning_rate": 1.0286090191897624e-07, + "loss": 0.4489, + "step": 21447 + }, + { + "epoch": 0.8850375505488157, + "grad_norm": 2.48899751166501, + "learning_rate": 1.0278795504364769e-07, + "loss": 0.5233, + "step": 21448 + }, + { + "epoch": 0.8850788148881736, + "grad_norm": 2.8597056846131643, + "learning_rate": 1.0271503312602154e-07, + "loss": 0.5674, + "step": 21449 + }, + { + "epoch": 0.8851200792275316, + "grad_norm": 2.278126992715575, + "learning_rate": 1.0264213616740026e-07, + "loss": 0.4728, + "step": 21450 + }, + { + "epoch": 0.8851613435668895, + "grad_norm": 2.827372321360267, + "learning_rate": 1.0256926416908647e-07, + "loss": 0.405, + "step": 21451 + }, + { + "epoch": 0.8852026079062474, + "grad_norm": 2.6312322644762034, + "learning_rate": 1.0249641713238111e-07, + "loss": 0.5116, + "step": 21452 + }, + { + "epoch": 0.8852438722456053, + "grad_norm": 16.50148984894465, + "learning_rate": 1.0242359505858585e-07, + "loss": 0.5291, + "step": 21453 + }, + { + "epoch": 0.8852851365849633, + "grad_norm": 3.120866173794796, + "learning_rate": 1.023507979490016e-07, + "loss": 0.4729, + "step": 21454 + }, + { + "epoch": 0.8853264009243212, + "grad_norm": 5.770131189965178, + "learning_rate": 1.0227802580492818e-07, + "loss": 0.5078, + "step": 21455 + }, + { + "epoch": 0.8853676652636792, + "grad_norm": 2.719156501604031, + "learning_rate": 1.0220527862766605e-07, + "loss": 0.4703, + "step": 21456 + }, + { + "epoch": 0.8854089296030371, + "grad_norm": 2.4661882681336604, + "learning_rate": 1.0213255641851416e-07, + "loss": 0.4761, + "step": 21457 + }, + { + "epoch": 0.885450193942395, + "grad_norm": 3.8945114327963584, + "learning_rate": 1.020598591787718e-07, + "loss": 0.509, + "step": 21458 + }, + { + "epoch": 0.8854914582817529, + "grad_norm": 6.387581215083744, + "learning_rate": 1.0198718690973763e-07, + "loss": 0.4738, + "step": 21459 + }, + { + "epoch": 0.8855327226211108, + "grad_norm": 2.3420086806891165, + "learning_rate": 1.0191453961270941e-07, + "loss": 0.5346, + "step": 21460 + }, + { + "epoch": 0.8855739869604687, + "grad_norm": 2.1855823285123024, + "learning_rate": 1.0184191728898495e-07, + "loss": 0.5087, + "step": 21461 + }, + { + "epoch": 0.8856152512998267, + "grad_norm": 3.6631554888448026, + "learning_rate": 1.0176931993986155e-07, + "loss": 0.4851, + "step": 21462 + }, + { + "epoch": 0.8856565156391846, + "grad_norm": 2.6207966235826268, + "learning_rate": 1.0169674756663616e-07, + "loss": 0.514, + "step": 21463 + }, + { + "epoch": 0.8856977799785426, + "grad_norm": 2.2764866256214984, + "learning_rate": 
1.0162420017060458e-07, + "loss": 0.4984, + "step": 21464 + }, + { + "epoch": 0.8857390443179005, + "grad_norm": 5.628366189395357, + "learning_rate": 1.0155167775306295e-07, + "loss": 0.5119, + "step": 21465 + }, + { + "epoch": 0.8857803086572584, + "grad_norm": 4.5940047351582844, + "learning_rate": 1.014791803153069e-07, + "loss": 0.4829, + "step": 21466 + }, + { + "epoch": 0.8858215729966163, + "grad_norm": 7.083835379761409, + "learning_rate": 1.0140670785863138e-07, + "loss": 0.512, + "step": 21467 + }, + { + "epoch": 0.8858628373359743, + "grad_norm": 2.0098592153480075, + "learning_rate": 1.0133426038433085e-07, + "loss": 0.4937, + "step": 21468 + }, + { + "epoch": 0.8859041016753322, + "grad_norm": 4.386517750775241, + "learning_rate": 1.0126183789369897e-07, + "loss": 0.5215, + "step": 21469 + }, + { + "epoch": 0.8859453660146901, + "grad_norm": 20.57778004436309, + "learning_rate": 1.0118944038803002e-07, + "loss": 0.5005, + "step": 21470 + }, + { + "epoch": 0.885986630354048, + "grad_norm": 4.098593957877303, + "learning_rate": 1.011170678686168e-07, + "loss": 0.5044, + "step": 21471 + }, + { + "epoch": 0.8860278946934059, + "grad_norm": 2.7774608164036234, + "learning_rate": 1.0104472033675243e-07, + "loss": 0.5302, + "step": 21472 + }, + { + "epoch": 0.8860691590327638, + "grad_norm": 2.2984681407938194, + "learning_rate": 1.0097239779372874e-07, + "loss": 0.4766, + "step": 21473 + }, + { + "epoch": 0.8861104233721218, + "grad_norm": 3.8813548671565266, + "learning_rate": 1.00900100240838e-07, + "loss": 0.4706, + "step": 21474 + }, + { + "epoch": 0.8861516877114798, + "grad_norm": 2.5153942864980783, + "learning_rate": 1.0082782767937138e-07, + "loss": 0.4932, + "step": 21475 + }, + { + "epoch": 0.8861929520508377, + "grad_norm": 3.6001538253031815, + "learning_rate": 1.0075558011062031e-07, + "loss": 0.5257, + "step": 21476 + }, + { + "epoch": 0.8862342163901956, + "grad_norm": 2.757496527423578, + "learning_rate": 1.006833575358746e-07, + "loss": 0.5248, + "step": 21477 + }, + { + "epoch": 0.8862754807295535, + "grad_norm": 2.9232725195022207, + "learning_rate": 1.0061115995642472e-07, + "loss": 0.5151, + "step": 21478 + }, + { + "epoch": 0.8863167450689114, + "grad_norm": 2.2127362909291386, + "learning_rate": 1.0053898737356049e-07, + "loss": 0.518, + "step": 21479 + }, + { + "epoch": 0.8863580094082694, + "grad_norm": 2.25343568481778, + "learning_rate": 1.0046683978857085e-07, + "loss": 0.4927, + "step": 21480 + }, + { + "epoch": 0.8863992737476273, + "grad_norm": 2.4957856172124617, + "learning_rate": 1.003947172027443e-07, + "loss": 0.5295, + "step": 21481 + }, + { + "epoch": 0.8864405380869852, + "grad_norm": 6.974997410706504, + "learning_rate": 1.0032261961736927e-07, + "loss": 0.4886, + "step": 21482 + }, + { + "epoch": 0.8864818024263431, + "grad_norm": 2.753390688735415, + "learning_rate": 1.0025054703373393e-07, + "loss": 0.5084, + "step": 21483 + }, + { + "epoch": 0.886523066765701, + "grad_norm": 4.735389047159492, + "learning_rate": 1.0017849945312558e-07, + "loss": 0.5552, + "step": 21484 + }, + { + "epoch": 0.8865643311050591, + "grad_norm": 11.805602276451221, + "learning_rate": 1.0010647687683083e-07, + "loss": 0.4733, + "step": 21485 + }, + { + "epoch": 0.886605595444417, + "grad_norm": 2.181902062279984, + "learning_rate": 1.0003447930613635e-07, + "loss": 0.5064, + "step": 21486 + }, + { + "epoch": 0.8866468597837749, + "grad_norm": 4.281587264260697, + "learning_rate": 9.996250674232827e-08, + "loss": 0.4698, + "step": 21487 + }, + { + "epoch": 
0.8866881241231328, + "grad_norm": 14.096395696069587, + "learning_rate": 9.989055918669238e-08, + "loss": 0.5283, + "step": 21488 + }, + { + "epoch": 0.8867293884624907, + "grad_norm": 2.758443463959175, + "learning_rate": 9.98186366405135e-08, + "loss": 0.5179, + "step": 21489 + }, + { + "epoch": 0.8867706528018486, + "grad_norm": 3.111168608705545, + "learning_rate": 9.974673910507643e-08, + "loss": 0.4605, + "step": 21490 + }, + { + "epoch": 0.8868119171412066, + "grad_norm": 2.6562074831471167, + "learning_rate": 9.967486658166547e-08, + "loss": 0.4822, + "step": 21491 + }, + { + "epoch": 0.8868531814805645, + "grad_norm": 5.6394573303677165, + "learning_rate": 9.960301907156477e-08, + "loss": 0.601, + "step": 21492 + }, + { + "epoch": 0.8868944458199224, + "grad_norm": 3.108577602914959, + "learning_rate": 9.953119657605748e-08, + "loss": 0.5241, + "step": 21493 + }, + { + "epoch": 0.8869357101592803, + "grad_norm": 2.3050015242846147, + "learning_rate": 9.945939909642604e-08, + "loss": 0.4925, + "step": 21494 + }, + { + "epoch": 0.8869769744986383, + "grad_norm": 4.005296737488721, + "learning_rate": 9.938762663395345e-08, + "loss": 0.4941, + "step": 21495 + }, + { + "epoch": 0.8870182388379962, + "grad_norm": 3.8868060762503562, + "learning_rate": 9.931587918992186e-08, + "loss": 0.5487, + "step": 21496 + }, + { + "epoch": 0.8870595031773542, + "grad_norm": 3.594004330763939, + "learning_rate": 9.924415676561238e-08, + "loss": 0.5145, + "step": 21497 + }, + { + "epoch": 0.8871007675167121, + "grad_norm": 2.2117013705548363, + "learning_rate": 9.91724593623065e-08, + "loss": 0.4638, + "step": 21498 + }, + { + "epoch": 0.88714203185607, + "grad_norm": 8.80497720081578, + "learning_rate": 9.910078698128472e-08, + "loss": 0.512, + "step": 21499 + }, + { + "epoch": 0.8871832961954279, + "grad_norm": 2.8139454986517265, + "learning_rate": 9.902913962382765e-08, + "loss": 0.4829, + "step": 21500 + }, + { + "epoch": 0.8872245605347858, + "grad_norm": 3.2669749387548057, + "learning_rate": 9.89575172912146e-08, + "loss": 0.5282, + "step": 21501 + }, + { + "epoch": 0.8872658248741437, + "grad_norm": 5.345794621403406, + "learning_rate": 9.888591998472523e-08, + "loss": 0.4518, + "step": 21502 + }, + { + "epoch": 0.8873070892135017, + "grad_norm": 2.1269713270818023, + "learning_rate": 9.881434770563835e-08, + "loss": 0.5819, + "step": 21503 + }, + { + "epoch": 0.8873483535528596, + "grad_norm": 2.655899527249582, + "learning_rate": 9.874280045523276e-08, + "loss": 0.5425, + "step": 21504 + }, + { + "epoch": 0.8873896178922176, + "grad_norm": 4.35294151650033, + "learning_rate": 9.867127823478578e-08, + "loss": 0.5242, + "step": 21505 + }, + { + "epoch": 0.8874308822315755, + "grad_norm": 2.4330141095643225, + "learning_rate": 9.859978104557537e-08, + "loss": 0.5157, + "step": 21506 + }, + { + "epoch": 0.8874721465709334, + "grad_norm": 1.8391331070037964, + "learning_rate": 9.852830888887903e-08, + "loss": 0.4493, + "step": 21507 + }, + { + "epoch": 0.8875134109102913, + "grad_norm": 8.218627134229765, + "learning_rate": 9.845686176597257e-08, + "loss": 0.5167, + "step": 21508 + }, + { + "epoch": 0.8875546752496493, + "grad_norm": 2.694578947111886, + "learning_rate": 9.838543967813313e-08, + "loss": 0.4632, + "step": 21509 + }, + { + "epoch": 0.8875959395890072, + "grad_norm": 4.602076690225486, + "learning_rate": 9.831404262663569e-08, + "loss": 0.5197, + "step": 21510 + }, + { + "epoch": 0.8876372039283651, + "grad_norm": 2.4787723662373127, + "learning_rate": 9.824267061275604e-08, + 
"loss": 0.5159, + "step": 21511 + }, + { + "epoch": 0.887678468267723, + "grad_norm": 3.1611824123868866, + "learning_rate": 9.817132363776887e-08, + "loss": 0.4732, + "step": 21512 + }, + { + "epoch": 0.8877197326070809, + "grad_norm": 3.5071153730721836, + "learning_rate": 9.810000170294897e-08, + "loss": 0.4928, + "step": 21513 + }, + { + "epoch": 0.8877609969464388, + "grad_norm": 4.427866842385367, + "learning_rate": 9.802870480956965e-08, + "loss": 0.5282, + "step": 21514 + }, + { + "epoch": 0.8878022612857969, + "grad_norm": 3.159722702921615, + "learning_rate": 9.795743295890508e-08, + "loss": 0.6037, + "step": 21515 + }, + { + "epoch": 0.8878435256251548, + "grad_norm": 2.1719801912418695, + "learning_rate": 9.788618615222805e-08, + "loss": 0.5656, + "step": 21516 + }, + { + "epoch": 0.8878847899645127, + "grad_norm": 2.5969203705041273, + "learning_rate": 9.781496439081156e-08, + "loss": 0.5226, + "step": 21517 + }, + { + "epoch": 0.8879260543038706, + "grad_norm": 3.6730000985484126, + "learning_rate": 9.774376767592724e-08, + "loss": 0.5046, + "step": 21518 + }, + { + "epoch": 0.8879673186432285, + "grad_norm": 2.4666713173070485, + "learning_rate": 9.76725960088471e-08, + "loss": 0.4773, + "step": 21519 + }, + { + "epoch": 0.8880085829825864, + "grad_norm": 1.7519896659516112, + "learning_rate": 9.760144939084276e-08, + "loss": 0.4528, + "step": 21520 + }, + { + "epoch": 0.8880498473219444, + "grad_norm": 4.612150098658502, + "learning_rate": 9.753032782318489e-08, + "loss": 0.4547, + "step": 21521 + }, + { + "epoch": 0.8880911116613023, + "grad_norm": 2.6062862538942833, + "learning_rate": 9.745923130714345e-08, + "loss": 0.5015, + "step": 21522 + }, + { + "epoch": 0.8881323760006602, + "grad_norm": 4.7601244249212336, + "learning_rate": 9.73881598439888e-08, + "loss": 0.5482, + "step": 21523 + }, + { + "epoch": 0.8881736403400181, + "grad_norm": 3.42348319937809, + "learning_rate": 9.731711343499039e-08, + "loss": 0.4951, + "step": 21524 + }, + { + "epoch": 0.8882149046793761, + "grad_norm": 3.3453582147796905, + "learning_rate": 9.724609208141755e-08, + "loss": 0.4856, + "step": 21525 + }, + { + "epoch": 0.888256169018734, + "grad_norm": 2.3128825269347866, + "learning_rate": 9.717509578453843e-08, + "loss": 0.4757, + "step": 21526 + }, + { + "epoch": 0.888297433358092, + "grad_norm": 1.9540932681172434, + "learning_rate": 9.710412454562151e-08, + "loss": 0.5053, + "step": 21527 + }, + { + "epoch": 0.8883386976974499, + "grad_norm": 4.088454845192614, + "learning_rate": 9.703317836593428e-08, + "loss": 0.4504, + "step": 21528 + }, + { + "epoch": 0.8883799620368078, + "grad_norm": 5.2102740904782, + "learning_rate": 9.696225724674457e-08, + "loss": 0.4974, + "step": 21529 + }, + { + "epoch": 0.8884212263761657, + "grad_norm": 2.24108120967962, + "learning_rate": 9.689136118931851e-08, + "loss": 0.4771, + "step": 21530 + }, + { + "epoch": 0.8884624907155236, + "grad_norm": 2.491166889850267, + "learning_rate": 9.682049019492279e-08, + "loss": 0.5034, + "step": 21531 + }, + { + "epoch": 0.8885037550548815, + "grad_norm": 2.715017955121021, + "learning_rate": 9.674964426482319e-08, + "loss": 0.499, + "step": 21532 + }, + { + "epoch": 0.8885450193942395, + "grad_norm": 7.263563203728825, + "learning_rate": 9.667882340028572e-08, + "loss": 0.5035, + "step": 21533 + }, + { + "epoch": 0.8885862837335974, + "grad_norm": 2.7823494973110083, + "learning_rate": 9.660802760257504e-08, + "loss": 0.5328, + "step": 21534 + }, + { + "epoch": 0.8886275480729553, + "grad_norm": 
4.058232103582822, + "learning_rate": 9.653725687295545e-08, + "loss": 0.4881, + "step": 21535 + }, + { + "epoch": 0.8886688124123133, + "grad_norm": 3.3667475953323454, + "learning_rate": 9.646651121269145e-08, + "loss": 0.4841, + "step": 21536 + }, + { + "epoch": 0.8887100767516712, + "grad_norm": 1.9918456346954818, + "learning_rate": 9.639579062304688e-08, + "loss": 0.5327, + "step": 21537 + }, + { + "epoch": 0.8887513410910292, + "grad_norm": 13.020106578919115, + "learning_rate": 9.632509510528437e-08, + "loss": 0.5222, + "step": 21538 + }, + { + "epoch": 0.8887926054303871, + "grad_norm": 3.3110341632347082, + "learning_rate": 9.625442466066725e-08, + "loss": 0.5332, + "step": 21539 + }, + { + "epoch": 0.888833869769745, + "grad_norm": 4.32830975043467, + "learning_rate": 9.618377929045786e-08, + "loss": 0.4827, + "step": 21540 + }, + { + "epoch": 0.8888751341091029, + "grad_norm": 3.4884544926490904, + "learning_rate": 9.611315899591799e-08, + "loss": 0.5251, + "step": 21541 + }, + { + "epoch": 0.8889163984484608, + "grad_norm": 2.942335429626597, + "learning_rate": 9.604256377830901e-08, + "loss": 0.6014, + "step": 21542 + }, + { + "epoch": 0.8889576627878187, + "grad_norm": 2.898249589070714, + "learning_rate": 9.597199363889187e-08, + "loss": 0.5085, + "step": 21543 + }, + { + "epoch": 0.8889989271271767, + "grad_norm": 5.0944423616228445, + "learning_rate": 9.590144857892741e-08, + "loss": 0.4731, + "step": 21544 + }, + { + "epoch": 0.8890401914665346, + "grad_norm": 9.487334620263793, + "learning_rate": 9.583092859967579e-08, + "loss": 0.5651, + "step": 21545 + }, + { + "epoch": 0.8890814558058926, + "grad_norm": 3.3808851279009926, + "learning_rate": 9.576043370239634e-08, + "loss": 0.5297, + "step": 21546 + }, + { + "epoch": 0.8891227201452505, + "grad_norm": 6.63011183480896, + "learning_rate": 9.568996388834839e-08, + "loss": 0.4825, + "step": 21547 + }, + { + "epoch": 0.8891639844846084, + "grad_norm": 2.1892059339294425, + "learning_rate": 9.561951915879057e-08, + "loss": 0.4518, + "step": 21548 + }, + { + "epoch": 0.8892052488239663, + "grad_norm": 2.9271730173664743, + "learning_rate": 9.554909951498141e-08, + "loss": 0.5436, + "step": 21549 + }, + { + "epoch": 0.8892465131633243, + "grad_norm": 2.7031245852331787, + "learning_rate": 9.547870495817889e-08, + "loss": 0.5914, + "step": 21550 + }, + { + "epoch": 0.8892877775026822, + "grad_norm": 4.966188144743389, + "learning_rate": 9.540833548964018e-08, + "loss": 0.4742, + "step": 21551 + }, + { + "epoch": 0.8893290418420401, + "grad_norm": 3.2833332694014734, + "learning_rate": 9.533799111062225e-08, + "loss": 0.5241, + "step": 21552 + }, + { + "epoch": 0.889370306181398, + "grad_norm": 3.0375594678739293, + "learning_rate": 9.526767182238177e-08, + "loss": 0.5622, + "step": 21553 + }, + { + "epoch": 0.8894115705207559, + "grad_norm": 6.455798441766758, + "learning_rate": 9.519737762617508e-08, + "loss": 0.5359, + "step": 21554 + }, + { + "epoch": 0.8894528348601138, + "grad_norm": 3.942991661552955, + "learning_rate": 9.512710852325718e-08, + "loss": 0.5396, + "step": 21555 + }, + { + "epoch": 0.8894940991994719, + "grad_norm": 1.9348590768323946, + "learning_rate": 9.505686451488355e-08, + "loss": 0.4705, + "step": 21556 + }, + { + "epoch": 0.8895353635388298, + "grad_norm": 3.7786436117738895, + "learning_rate": 9.498664560230919e-08, + "loss": 0.4838, + "step": 21557 + }, + { + "epoch": 0.8895766278781877, + "grad_norm": 3.0168820691395735, + "learning_rate": 9.491645178678792e-08, + "loss": 0.4745, + "step": 
21558 + }, + { + "epoch": 0.8896178922175456, + "grad_norm": 4.733890376986279, + "learning_rate": 9.48462830695741e-08, + "loss": 0.4656, + "step": 21559 + }, + { + "epoch": 0.8896591565569035, + "grad_norm": 3.0859064398507208, + "learning_rate": 9.477613945192053e-08, + "loss": 0.4789, + "step": 21560 + }, + { + "epoch": 0.8897004208962614, + "grad_norm": 2.918374332100772, + "learning_rate": 9.470602093508057e-08, + "loss": 0.4992, + "step": 21561 + }, + { + "epoch": 0.8897416852356194, + "grad_norm": 4.517936041973101, + "learning_rate": 9.463592752030669e-08, + "loss": 0.4878, + "step": 21562 + }, + { + "epoch": 0.8897829495749773, + "grad_norm": 3.194686319468768, + "learning_rate": 9.456585920885058e-08, + "loss": 0.471, + "step": 21563 + }, + { + "epoch": 0.8898242139143352, + "grad_norm": 5.569855248049312, + "learning_rate": 9.449581600196405e-08, + "loss": 0.5397, + "step": 21564 + }, + { + "epoch": 0.8898654782536931, + "grad_norm": 4.672213474615411, + "learning_rate": 9.442579790089845e-08, + "loss": 0.5073, + "step": 21565 + }, + { + "epoch": 0.8899067425930511, + "grad_norm": 3.5062421059832136, + "learning_rate": 9.435580490690426e-08, + "loss": 0.5039, + "step": 21566 + }, + { + "epoch": 0.889948006932409, + "grad_norm": 2.6611873448877503, + "learning_rate": 9.428583702123184e-08, + "loss": 0.4531, + "step": 21567 + }, + { + "epoch": 0.889989271271767, + "grad_norm": 4.71076776090385, + "learning_rate": 9.421589424513083e-08, + "loss": 0.4657, + "step": 21568 + }, + { + "epoch": 0.8900305356111249, + "grad_norm": 2.360502385817263, + "learning_rate": 9.414597657985057e-08, + "loss": 0.5364, + "step": 21569 + }, + { + "epoch": 0.8900717999504828, + "grad_norm": 2.7090039628237377, + "learning_rate": 9.407608402664042e-08, + "loss": 0.4606, + "step": 21570 + }, + { + "epoch": 0.8901130642898407, + "grad_norm": 3.5292255820066267, + "learning_rate": 9.400621658674818e-08, + "loss": 0.4582, + "step": 21571 + }, + { + "epoch": 0.8901543286291986, + "grad_norm": 7.827721002748885, + "learning_rate": 9.393637426142254e-08, + "loss": 0.5591, + "step": 21572 + }, + { + "epoch": 0.8901955929685565, + "grad_norm": 2.9567410717815523, + "learning_rate": 9.386655705191033e-08, + "loss": 0.4466, + "step": 21573 + }, + { + "epoch": 0.8902368573079145, + "grad_norm": 2.3037125079714773, + "learning_rate": 9.379676495945921e-08, + "loss": 0.429, + "step": 21574 + }, + { + "epoch": 0.8902781216472724, + "grad_norm": 6.2102270313393015, + "learning_rate": 9.372699798531553e-08, + "loss": 0.5075, + "step": 21575 + }, + { + "epoch": 0.8903193859866304, + "grad_norm": 2.7118670552708144, + "learning_rate": 9.365725613072562e-08, + "loss": 0.5332, + "step": 21576 + }, + { + "epoch": 0.8903606503259883, + "grad_norm": 5.601433514459601, + "learning_rate": 9.358753939693515e-08, + "loss": 0.4549, + "step": 21577 + }, + { + "epoch": 0.8904019146653462, + "grad_norm": 19.00229931115987, + "learning_rate": 9.351784778518995e-08, + "loss": 0.5129, + "step": 21578 + }, + { + "epoch": 0.8904431790047042, + "grad_norm": 10.462837210546816, + "learning_rate": 9.344818129673405e-08, + "loss": 0.555, + "step": 21579 + }, + { + "epoch": 0.8904844433440621, + "grad_norm": 2.62535535195957, + "learning_rate": 9.337853993281242e-08, + "loss": 0.4969, + "step": 21580 + }, + { + "epoch": 0.89052570768342, + "grad_norm": 3.1139629543050393, + "learning_rate": 9.330892369466876e-08, + "loss": 0.4816, + "step": 21581 + }, + { + "epoch": 0.8905669720227779, + "grad_norm": 2.8770889894332377, + "learning_rate": 
9.323933258354705e-08, + "loss": 0.5516, + "step": 21582 + }, + { + "epoch": 0.8906082363621358, + "grad_norm": 3.125704752524757, + "learning_rate": 9.316976660068966e-08, + "loss": 0.4729, + "step": 21583 + }, + { + "epoch": 0.8906495007014937, + "grad_norm": 4.367377383388525, + "learning_rate": 9.310022574733973e-08, + "loss": 0.4921, + "step": 21584 + }, + { + "epoch": 0.8906907650408517, + "grad_norm": 7.482685183640054, + "learning_rate": 9.303071002473962e-08, + "loss": 0.4952, + "step": 21585 + }, + { + "epoch": 0.8907320293802097, + "grad_norm": 2.127379625657732, + "learning_rate": 9.296121943413033e-08, + "loss": 0.475, + "step": 21586 + }, + { + "epoch": 0.8907732937195676, + "grad_norm": 4.330283841003812, + "learning_rate": 9.28917539767537e-08, + "loss": 0.482, + "step": 21587 + }, + { + "epoch": 0.8908145580589255, + "grad_norm": 3.3680418838637514, + "learning_rate": 9.282231365385041e-08, + "loss": 0.5048, + "step": 21588 + }, + { + "epoch": 0.8908558223982834, + "grad_norm": 3.572747384456154, + "learning_rate": 9.275289846666062e-08, + "loss": 0.499, + "step": 21589 + }, + { + "epoch": 0.8908970867376413, + "grad_norm": 12.736480563098947, + "learning_rate": 9.268350841642453e-08, + "loss": 0.5848, + "step": 21590 + }, + { + "epoch": 0.8909383510769993, + "grad_norm": 3.327235078557269, + "learning_rate": 9.26141435043818e-08, + "loss": 0.5457, + "step": 21591 + }, + { + "epoch": 0.8909796154163572, + "grad_norm": 2.2182329737323814, + "learning_rate": 9.254480373177093e-08, + "loss": 0.4931, + "step": 21592 + }, + { + "epoch": 0.8910208797557151, + "grad_norm": 6.441823285901587, + "learning_rate": 9.247548909983077e-08, + "loss": 0.4866, + "step": 21593 + }, + { + "epoch": 0.891062144095073, + "grad_norm": 2.206033093995579, + "learning_rate": 9.240619960979934e-08, + "loss": 0.5051, + "step": 21594 + }, + { + "epoch": 0.8911034084344309, + "grad_norm": 5.46012671794584, + "learning_rate": 9.233693526291481e-08, + "loss": 0.4913, + "step": 21595 + }, + { + "epoch": 0.8911446727737888, + "grad_norm": 3.7658200772165364, + "learning_rate": 9.226769606041386e-08, + "loss": 0.4888, + "step": 21596 + }, + { + "epoch": 0.8911859371131469, + "grad_norm": 4.9668473759998095, + "learning_rate": 9.219848200353348e-08, + "loss": 0.5106, + "step": 21597 + }, + { + "epoch": 0.8912272014525048, + "grad_norm": 3.4993785421004486, + "learning_rate": 9.212929309351021e-08, + "loss": 0.4679, + "step": 21598 + }, + { + "epoch": 0.8912684657918627, + "grad_norm": 4.247475668762027, + "learning_rate": 9.206012933157954e-08, + "loss": 0.5494, + "step": 21599 + }, + { + "epoch": 0.8913097301312206, + "grad_norm": 3.0841202316667395, + "learning_rate": 9.199099071897698e-08, + "loss": 0.5332, + "step": 21600 + }, + { + "epoch": 0.8913509944705785, + "grad_norm": 12.05601428958141, + "learning_rate": 9.192187725693774e-08, + "loss": 0.5385, + "step": 21601 + }, + { + "epoch": 0.8913922588099364, + "grad_norm": 4.415822604243803, + "learning_rate": 9.185278894669613e-08, + "loss": 0.5513, + "step": 21602 + }, + { + "epoch": 0.8914335231492944, + "grad_norm": 3.4991374433841003, + "learning_rate": 9.178372578948651e-08, + "loss": 0.508, + "step": 21603 + }, + { + "epoch": 0.8914747874886523, + "grad_norm": 4.357993776717538, + "learning_rate": 9.171468778654224e-08, + "loss": 0.4947, + "step": 21604 + }, + { + "epoch": 0.8915160518280102, + "grad_norm": 18.465035299089504, + "learning_rate": 9.164567493909665e-08, + "loss": 0.5068, + "step": 21605 + }, + { + "epoch": 0.8915573161673681, + 
"grad_norm": 6.290793001348728, + "learning_rate": 9.157668724838242e-08, + "loss": 0.5071, + "step": 21606 + }, + { + "epoch": 0.8915985805067261, + "grad_norm": 2.778296135867913, + "learning_rate": 9.150772471563223e-08, + "loss": 0.5462, + "step": 21607 + }, + { + "epoch": 0.891639844846084, + "grad_norm": 2.1105785167412976, + "learning_rate": 9.143878734207711e-08, + "loss": 0.5142, + "step": 21608 + }, + { + "epoch": 0.891681109185442, + "grad_norm": 5.652530379172424, + "learning_rate": 9.136987512894923e-08, + "loss": 0.4724, + "step": 21609 + }, + { + "epoch": 0.8917223735247999, + "grad_norm": 5.6089599989873244, + "learning_rate": 9.130098807747894e-08, + "loss": 0.5219, + "step": 21610 + }, + { + "epoch": 0.8917636378641578, + "grad_norm": 2.8677639389659166, + "learning_rate": 9.123212618889743e-08, + "loss": 0.53, + "step": 21611 + }, + { + "epoch": 0.8918049022035157, + "grad_norm": 2.6233888018011875, + "learning_rate": 9.116328946443437e-08, + "loss": 0.5063, + "step": 21612 + }, + { + "epoch": 0.8918461665428736, + "grad_norm": 7.319238339431747, + "learning_rate": 9.10944779053191e-08, + "loss": 0.4996, + "step": 21613 + }, + { + "epoch": 0.8918874308822315, + "grad_norm": 3.5489787448587955, + "learning_rate": 9.102569151278084e-08, + "loss": 0.5387, + "step": 21614 + }, + { + "epoch": 0.8919286952215895, + "grad_norm": 2.7984222781493755, + "learning_rate": 9.095693028804874e-08, + "loss": 0.5036, + "step": 21615 + }, + { + "epoch": 0.8919699595609474, + "grad_norm": 8.148410774916998, + "learning_rate": 9.088819423235067e-08, + "loss": 0.5031, + "step": 21616 + }, + { + "epoch": 0.8920112239003054, + "grad_norm": 18.474368564075178, + "learning_rate": 9.081948334691431e-08, + "loss": 0.5356, + "step": 21617 + }, + { + "epoch": 0.8920524882396633, + "grad_norm": 2.6616049900158947, + "learning_rate": 9.075079763296735e-08, + "loss": 0.4486, + "step": 21618 + }, + { + "epoch": 0.8920937525790212, + "grad_norm": 3.7934599567915686, + "learning_rate": 9.068213709173662e-08, + "loss": 0.5357, + "step": 21619 + }, + { + "epoch": 0.8921350169183792, + "grad_norm": 3.0394803942081983, + "learning_rate": 9.061350172444848e-08, + "loss": 0.5456, + "step": 21620 + }, + { + "epoch": 0.8921762812577371, + "grad_norm": 3.331373517017927, + "learning_rate": 9.054489153232881e-08, + "loss": 0.4937, + "step": 21621 + }, + { + "epoch": 0.892217545597095, + "grad_norm": 2.7189877663116113, + "learning_rate": 9.047630651660327e-08, + "loss": 0.516, + "step": 21622 + }, + { + "epoch": 0.8922588099364529, + "grad_norm": 2.219643109862733, + "learning_rate": 9.040774667849722e-08, + "loss": 0.4924, + "step": 21623 + }, + { + "epoch": 0.8923000742758108, + "grad_norm": 4.648450524894083, + "learning_rate": 9.033921201923484e-08, + "loss": 0.5193, + "step": 21624 + }, + { + "epoch": 0.8923413386151687, + "grad_norm": 2.7777208747068993, + "learning_rate": 9.027070254004083e-08, + "loss": 0.5787, + "step": 21625 + }, + { + "epoch": 0.8923826029545266, + "grad_norm": 1.9388926530502777, + "learning_rate": 9.020221824213853e-08, + "loss": 0.4613, + "step": 21626 + }, + { + "epoch": 0.8924238672938847, + "grad_norm": 2.481482445698427, + "learning_rate": 9.013375912675131e-08, + "loss": 0.5126, + "step": 21627 + }, + { + "epoch": 0.8924651316332426, + "grad_norm": 2.7111156202497124, + "learning_rate": 9.006532519510236e-08, + "loss": 0.5057, + "step": 21628 + }, + { + "epoch": 0.8925063959726005, + "grad_norm": 6.049750267009779, + "learning_rate": 8.999691644841352e-08, + "loss": 0.5141, + 
"step": 21629 + }, + { + "epoch": 0.8925476603119584, + "grad_norm": 4.218500757047701, + "learning_rate": 8.992853288790698e-08, + "loss": 0.4902, + "step": 21630 + }, + { + "epoch": 0.8925889246513163, + "grad_norm": 2.314239869342839, + "learning_rate": 8.986017451480444e-08, + "loss": 0.4877, + "step": 21631 + }, + { + "epoch": 0.8926301889906743, + "grad_norm": 2.4576123916419403, + "learning_rate": 8.979184133032692e-08, + "loss": 0.453, + "step": 21632 + }, + { + "epoch": 0.8926714533300322, + "grad_norm": 2.447560017062563, + "learning_rate": 8.972353333569478e-08, + "loss": 0.4951, + "step": 21633 + }, + { + "epoch": 0.8927127176693901, + "grad_norm": 2.8294518322670243, + "learning_rate": 8.965525053212837e-08, + "loss": 0.5256, + "step": 21634 + }, + { + "epoch": 0.892753982008748, + "grad_norm": 6.984648297024352, + "learning_rate": 8.958699292084737e-08, + "loss": 0.5038, + "step": 21635 + }, + { + "epoch": 0.8927952463481059, + "grad_norm": 2.7786163096348386, + "learning_rate": 8.951876050307084e-08, + "loss": 0.4515, + "step": 21636 + }, + { + "epoch": 0.892836510687464, + "grad_norm": 3.516411026309352, + "learning_rate": 8.945055328001778e-08, + "loss": 0.5152, + "step": 21637 + }, + { + "epoch": 0.8928777750268219, + "grad_norm": 2.4129312838961154, + "learning_rate": 8.938237125290671e-08, + "loss": 0.5021, + "step": 21638 + }, + { + "epoch": 0.8929190393661798, + "grad_norm": 4.574278104180239, + "learning_rate": 8.931421442295501e-08, + "loss": 0.5307, + "step": 21639 + }, + { + "epoch": 0.8929603037055377, + "grad_norm": 3.588880197039716, + "learning_rate": 8.924608279138085e-08, + "loss": 0.505, + "step": 21640 + }, + { + "epoch": 0.8930015680448956, + "grad_norm": 4.136064692625804, + "learning_rate": 8.917797635940045e-08, + "loss": 0.4965, + "step": 21641 + }, + { + "epoch": 0.8930428323842535, + "grad_norm": 3.283313753164604, + "learning_rate": 8.910989512823065e-08, + "loss": 0.5373, + "step": 21642 + }, + { + "epoch": 0.8930840967236114, + "grad_norm": 4.616410527807149, + "learning_rate": 8.90418390990878e-08, + "loss": 0.4883, + "step": 21643 + }, + { + "epoch": 0.8931253610629694, + "grad_norm": 3.964766686110219, + "learning_rate": 8.897380827318747e-08, + "loss": 0.5025, + "step": 21644 + }, + { + "epoch": 0.8931666254023273, + "grad_norm": 1.99100925217168, + "learning_rate": 8.890580265174464e-08, + "loss": 0.5183, + "step": 21645 + }, + { + "epoch": 0.8932078897416852, + "grad_norm": 3.6789455097947688, + "learning_rate": 8.883782223597403e-08, + "loss": 0.5502, + "step": 21646 + }, + { + "epoch": 0.8932491540810432, + "grad_norm": 2.7277702124886596, + "learning_rate": 8.876986702709034e-08, + "loss": 0.5614, + "step": 21647 + }, + { + "epoch": 0.8932904184204011, + "grad_norm": 2.3107233230912594, + "learning_rate": 8.870193702630725e-08, + "loss": 0.5477, + "step": 21648 + }, + { + "epoch": 0.893331682759759, + "grad_norm": 3.112046504448009, + "learning_rate": 8.863403223483796e-08, + "loss": 0.5048, + "step": 21649 + }, + { + "epoch": 0.893372947099117, + "grad_norm": 3.1407176513265758, + "learning_rate": 8.856615265389551e-08, + "loss": 0.5468, + "step": 21650 + }, + { + "epoch": 0.8934142114384749, + "grad_norm": 9.744228306366226, + "learning_rate": 8.849829828469258e-08, + "loss": 0.5264, + "step": 21651 + }, + { + "epoch": 0.8934554757778328, + "grad_norm": 2.9049329551219327, + "learning_rate": 8.843046912844122e-08, + "loss": 0.5497, + "step": 21652 + }, + { + "epoch": 0.8934967401171907, + "grad_norm": 13.658539105980799, + 
"learning_rate": 8.836266518635245e-08, + "loss": 0.5125, + "step": 21653 + }, + { + "epoch": 0.8935380044565486, + "grad_norm": 4.429656701743761, + "learning_rate": 8.829488645963812e-08, + "loss": 0.5464, + "step": 21654 + }, + { + "epoch": 0.8935792687959065, + "grad_norm": 5.170688919402739, + "learning_rate": 8.822713294950846e-08, + "loss": 0.4831, + "step": 21655 + }, + { + "epoch": 0.8936205331352645, + "grad_norm": 2.503308649213766, + "learning_rate": 8.815940465717431e-08, + "loss": 0.4954, + "step": 21656 + }, + { + "epoch": 0.8936617974746224, + "grad_norm": 4.382533156383575, + "learning_rate": 8.809170158384472e-08, + "loss": 0.4898, + "step": 21657 + }, + { + "epoch": 0.8937030618139804, + "grad_norm": 3.270536628836406, + "learning_rate": 8.802402373072938e-08, + "loss": 0.514, + "step": 21658 + }, + { + "epoch": 0.8937443261533383, + "grad_norm": 2.370154859633004, + "learning_rate": 8.795637109903731e-08, + "loss": 0.4148, + "step": 21659 + }, + { + "epoch": 0.8937855904926962, + "grad_norm": 3.138791218922821, + "learning_rate": 8.788874368997707e-08, + "loss": 0.5298, + "step": 21660 + }, + { + "epoch": 0.8938268548320542, + "grad_norm": 2.7832356273362953, + "learning_rate": 8.782114150475617e-08, + "loss": 0.5294, + "step": 21661 + }, + { + "epoch": 0.8938681191714121, + "grad_norm": 3.717829840684972, + "learning_rate": 8.775356454458234e-08, + "loss": 0.4721, + "step": 21662 + }, + { + "epoch": 0.89390938351077, + "grad_norm": 15.547788308927203, + "learning_rate": 8.768601281066274e-08, + "loss": 0.5314, + "step": 21663 + }, + { + "epoch": 0.8939506478501279, + "grad_norm": 6.088162510449287, + "learning_rate": 8.761848630420443e-08, + "loss": 0.4893, + "step": 21664 + }, + { + "epoch": 0.8939919121894858, + "grad_norm": 11.147504994636169, + "learning_rate": 8.755098502641296e-08, + "loss": 0.4916, + "step": 21665 + }, + { + "epoch": 0.8940331765288437, + "grad_norm": 3.7709674059141043, + "learning_rate": 8.748350897849433e-08, + "loss": 0.4674, + "step": 21666 + }, + { + "epoch": 0.8940744408682016, + "grad_norm": 4.612351081713655, + "learning_rate": 8.74160581616536e-08, + "loss": 0.5363, + "step": 21667 + }, + { + "epoch": 0.8941157052075597, + "grad_norm": 5.11036006826841, + "learning_rate": 8.73486325770958e-08, + "loss": 0.4837, + "step": 21668 + }, + { + "epoch": 0.8941569695469176, + "grad_norm": 5.10100588612628, + "learning_rate": 8.72812322260258e-08, + "loss": 0.5445, + "step": 21669 + }, + { + "epoch": 0.8941982338862755, + "grad_norm": 2.83521120504339, + "learning_rate": 8.721385710964663e-08, + "loss": 0.5449, + "step": 21670 + }, + { + "epoch": 0.8942394982256334, + "grad_norm": 5.353986920289951, + "learning_rate": 8.714650722916217e-08, + "loss": 0.4841, + "step": 21671 + }, + { + "epoch": 0.8942807625649913, + "grad_norm": 2.370473724140895, + "learning_rate": 8.707918258577563e-08, + "loss": 0.5379, + "step": 21672 + }, + { + "epoch": 0.8943220269043493, + "grad_norm": 2.46462793665569, + "learning_rate": 8.70118831806897e-08, + "loss": 0.5146, + "step": 21673 + }, + { + "epoch": 0.8943632912437072, + "grad_norm": 5.295085956318814, + "learning_rate": 8.694460901510609e-08, + "loss": 0.4492, + "step": 21674 + }, + { + "epoch": 0.8944045555830651, + "grad_norm": 2.569029867283653, + "learning_rate": 8.687736009022668e-08, + "loss": 0.4885, + "step": 21675 + }, + { + "epoch": 0.894445819922423, + "grad_norm": 8.225860807442073, + "learning_rate": 8.681013640725283e-08, + "loss": 0.5004, + "step": 21676 + }, + { + "epoch": 
0.8944870842617809, + "grad_norm": 4.9154829046832695, + "learning_rate": 8.674293796738542e-08, + "loss": 0.5124, + "step": 21677 + }, + { + "epoch": 0.894528348601139, + "grad_norm": 3.443651179264829, + "learning_rate": 8.667576477182416e-08, + "loss": 0.5324, + "step": 21678 + }, + { + "epoch": 0.8945696129404969, + "grad_norm": 7.122339938099977, + "learning_rate": 8.660861682176941e-08, + "loss": 0.5276, + "step": 21679 + }, + { + "epoch": 0.8946108772798548, + "grad_norm": 2.974133395595193, + "learning_rate": 8.654149411842038e-08, + "loss": 0.4868, + "step": 21680 + }, + { + "epoch": 0.8946521416192127, + "grad_norm": 4.017170625218571, + "learning_rate": 8.647439666297662e-08, + "loss": 0.4844, + "step": 21681 + }, + { + "epoch": 0.8946934059585706, + "grad_norm": 4.35878227403221, + "learning_rate": 8.640732445663584e-08, + "loss": 0.4884, + "step": 21682 + }, + { + "epoch": 0.8947346702979285, + "grad_norm": 2.470881062150222, + "learning_rate": 8.634027750059658e-08, + "loss": 0.4993, + "step": 21683 + }, + { + "epoch": 0.8947759346372864, + "grad_norm": 6.579327516923218, + "learning_rate": 8.627325579605655e-08, + "loss": 0.5005, + "step": 21684 + }, + { + "epoch": 0.8948171989766444, + "grad_norm": 6.414140520369044, + "learning_rate": 8.620625934421277e-08, + "loss": 0.5038, + "step": 21685 + }, + { + "epoch": 0.8948584633160023, + "grad_norm": 2.71735070997923, + "learning_rate": 8.613928814626182e-08, + "loss": 0.4785, + "step": 21686 + }, + { + "epoch": 0.8948997276553602, + "grad_norm": 5.225987174028134, + "learning_rate": 8.60723422034002e-08, + "loss": 0.5726, + "step": 21687 + }, + { + "epoch": 0.8949409919947182, + "grad_norm": 4.048089033317693, + "learning_rate": 8.600542151682366e-08, + "loss": 0.581, + "step": 21688 + }, + { + "epoch": 0.8949822563340761, + "grad_norm": 3.2774611164244942, + "learning_rate": 8.593852608772773e-08, + "loss": 0.4972, + "step": 21689 + }, + { + "epoch": 0.895023520673434, + "grad_norm": 2.519029456343925, + "learning_rate": 8.587165591730695e-08, + "loss": 0.5318, + "step": 21690 + }, + { + "epoch": 0.895064785012792, + "grad_norm": 3.0095117793098263, + "learning_rate": 8.58048110067562e-08, + "loss": 0.5239, + "step": 21691 + }, + { + "epoch": 0.8951060493521499, + "grad_norm": 2.3860153140185263, + "learning_rate": 8.573799135726901e-08, + "loss": 0.5069, + "step": 21692 + }, + { + "epoch": 0.8951473136915078, + "grad_norm": 2.8469093895624313, + "learning_rate": 8.567119697003961e-08, + "loss": 0.4921, + "step": 21693 + }, + { + "epoch": 0.8951885780308657, + "grad_norm": 2.894134893780314, + "learning_rate": 8.560442784626038e-08, + "loss": 0.4944, + "step": 21694 + }, + { + "epoch": 0.8952298423702236, + "grad_norm": 2.756614701631344, + "learning_rate": 8.553768398712436e-08, + "loss": 0.5168, + "step": 21695 + }, + { + "epoch": 0.8952711067095815, + "grad_norm": 4.167046691069673, + "learning_rate": 8.547096539382376e-08, + "loss": 0.5161, + "step": 21696 + }, + { + "epoch": 0.8953123710489395, + "grad_norm": 2.5966211403946953, + "learning_rate": 8.540427206755064e-08, + "loss": 0.5243, + "step": 21697 + }, + { + "epoch": 0.8953536353882975, + "grad_norm": 3.0426361050634148, + "learning_rate": 8.53376040094957e-08, + "loss": 0.5805, + "step": 21698 + }, + { + "epoch": 0.8953948997276554, + "grad_norm": 2.6130458678223585, + "learning_rate": 8.527096122085016e-08, + "loss": 0.5454, + "step": 21699 + }, + { + "epoch": 0.8954361640670133, + "grad_norm": 2.2294356364232852, + "learning_rate": 8.520434370280439e-08, + 
"loss": 0.4795, + "step": 21700 + }, + { + "epoch": 0.8954774284063712, + "grad_norm": 3.685541948316507, + "learning_rate": 8.513775145654846e-08, + "loss": 0.5097, + "step": 21701 + }, + { + "epoch": 0.8955186927457292, + "grad_norm": 3.3691108150062146, + "learning_rate": 8.507118448327139e-08, + "loss": 0.4579, + "step": 21702 + }, + { + "epoch": 0.8955599570850871, + "grad_norm": 3.774097787324051, + "learning_rate": 8.500464278416276e-08, + "loss": 0.4843, + "step": 21703 + }, + { + "epoch": 0.895601221424445, + "grad_norm": 2.707043280473951, + "learning_rate": 8.49381263604111e-08, + "loss": 0.5626, + "step": 21704 + }, + { + "epoch": 0.8956424857638029, + "grad_norm": 9.036147183522955, + "learning_rate": 8.487163521320429e-08, + "loss": 0.5476, + "step": 21705 + }, + { + "epoch": 0.8956837501031608, + "grad_norm": 2.4851377306273235, + "learning_rate": 8.480516934373039e-08, + "loss": 0.4996, + "step": 21706 + }, + { + "epoch": 0.8957250144425187, + "grad_norm": 5.195115047327151, + "learning_rate": 8.473872875317629e-08, + "loss": 0.5119, + "step": 21707 + }, + { + "epoch": 0.8957662787818768, + "grad_norm": 3.1429750522553093, + "learning_rate": 8.467231344272885e-08, + "loss": 0.5311, + "step": 21708 + }, + { + "epoch": 0.8958075431212347, + "grad_norm": 2.720838641050204, + "learning_rate": 8.460592341357449e-08, + "loss": 0.5185, + "step": 21709 + }, + { + "epoch": 0.8958488074605926, + "grad_norm": 3.245180663122762, + "learning_rate": 8.453955866689939e-08, + "loss": 0.4902, + "step": 21710 + }, + { + "epoch": 0.8958900717999505, + "grad_norm": 3.6843545592253584, + "learning_rate": 8.447321920388845e-08, + "loss": 0.4921, + "step": 21711 + }, + { + "epoch": 0.8959313361393084, + "grad_norm": 2.1429730798730766, + "learning_rate": 8.44069050257269e-08, + "loss": 0.4645, + "step": 21712 + }, + { + "epoch": 0.8959726004786663, + "grad_norm": 3.955355084349434, + "learning_rate": 8.434061613359962e-08, + "loss": 0.513, + "step": 21713 + }, + { + "epoch": 0.8960138648180243, + "grad_norm": 4.40222042005872, + "learning_rate": 8.427435252868998e-08, + "loss": 0.4926, + "step": 21714 + }, + { + "epoch": 0.8960551291573822, + "grad_norm": 2.407154683562568, + "learning_rate": 8.420811421218222e-08, + "loss": 0.4947, + "step": 21715 + }, + { + "epoch": 0.8960963934967401, + "grad_norm": 2.3807451573847174, + "learning_rate": 8.414190118525905e-08, + "loss": 0.5136, + "step": 21716 + }, + { + "epoch": 0.896137657836098, + "grad_norm": 5.411119713064905, + "learning_rate": 8.407571344910387e-08, + "loss": 0.5337, + "step": 21717 + }, + { + "epoch": 0.8961789221754559, + "grad_norm": 2.435837489533874, + "learning_rate": 8.400955100489855e-08, + "loss": 0.5173, + "step": 21718 + }, + { + "epoch": 0.8962201865148139, + "grad_norm": 3.944792307534537, + "learning_rate": 8.394341385382464e-08, + "loss": 0.4891, + "step": 21719 + }, + { + "epoch": 0.8962614508541719, + "grad_norm": 5.924326276737725, + "learning_rate": 8.387730199706372e-08, + "loss": 0.5204, + "step": 21720 + }, + { + "epoch": 0.8963027151935298, + "grad_norm": 2.2152554474156863, + "learning_rate": 8.3811215435797e-08, + "loss": 0.4895, + "step": 21721 + }, + { + "epoch": 0.8963439795328877, + "grad_norm": 3.8757403443758163, + "learning_rate": 8.374515417120471e-08, + "loss": 0.4676, + "step": 21722 + }, + { + "epoch": 0.8963852438722456, + "grad_norm": 2.9427704305088853, + "learning_rate": 8.367911820446688e-08, + "loss": 0.5185, + "step": 21723 + }, + { + "epoch": 0.8964265082116035, + "grad_norm": 
2.248339274282283, + "learning_rate": 8.361310753676294e-08, + "loss": 0.4958, + "step": 21724 + }, + { + "epoch": 0.8964677725509614, + "grad_norm": 2.7537291092890976, + "learning_rate": 8.354712216927223e-08, + "loss": 0.4657, + "step": 21725 + }, + { + "epoch": 0.8965090368903194, + "grad_norm": 3.337572136810634, + "learning_rate": 8.348116210317352e-08, + "loss": 0.4755, + "step": 21726 + }, + { + "epoch": 0.8965503012296773, + "grad_norm": 4.634016276245574, + "learning_rate": 8.341522733964469e-08, + "loss": 0.5185, + "step": 21727 + }, + { + "epoch": 0.8965915655690352, + "grad_norm": 2.372678846352758, + "learning_rate": 8.334931787986361e-08, + "loss": 0.5414, + "step": 21728 + }, + { + "epoch": 0.8966328299083932, + "grad_norm": 2.488848417805109, + "learning_rate": 8.328343372500752e-08, + "loss": 0.5197, + "step": 21729 + }, + { + "epoch": 0.8966740942477511, + "grad_norm": 2.2574147281414154, + "learning_rate": 8.321757487625397e-08, + "loss": 0.4935, + "step": 21730 + }, + { + "epoch": 0.896715358587109, + "grad_norm": 2.076011124952396, + "learning_rate": 8.315174133477821e-08, + "loss": 0.5405, + "step": 21731 + }, + { + "epoch": 0.896756622926467, + "grad_norm": 2.047903090679283, + "learning_rate": 8.308593310175694e-08, + "loss": 0.5092, + "step": 21732 + }, + { + "epoch": 0.8967978872658249, + "grad_norm": 2.6243018449468978, + "learning_rate": 8.302015017836539e-08, + "loss": 0.5058, + "step": 21733 + }, + { + "epoch": 0.8968391516051828, + "grad_norm": 3.736374282612429, + "learning_rate": 8.29543925657788e-08, + "loss": 0.5115, + "step": 21734 + }, + { + "epoch": 0.8968804159445407, + "grad_norm": 2.9813379475206996, + "learning_rate": 8.288866026517156e-08, + "loss": 0.5865, + "step": 21735 + }, + { + "epoch": 0.8969216802838986, + "grad_norm": 3.010198807456436, + "learning_rate": 8.282295327771804e-08, + "loss": 0.4663, + "step": 21736 + }, + { + "epoch": 0.8969629446232565, + "grad_norm": 4.269277128035332, + "learning_rate": 8.275727160459168e-08, + "loss": 0.4963, + "step": 21737 + }, + { + "epoch": 0.8970042089626145, + "grad_norm": 3.0457077834966255, + "learning_rate": 8.2691615246966e-08, + "loss": 0.4968, + "step": 21738 + }, + { + "epoch": 0.8970454733019725, + "grad_norm": 3.2770541656372845, + "learning_rate": 8.262598420601358e-08, + "loss": 0.5251, + "step": 21739 + }, + { + "epoch": 0.8970867376413304, + "grad_norm": 3.4107321316222747, + "learning_rate": 8.256037848290665e-08, + "loss": 0.5309, + "step": 21740 + }, + { + "epoch": 0.8971280019806883, + "grad_norm": 2.531385762439162, + "learning_rate": 8.249479807881727e-08, + "loss": 0.5316, + "step": 21741 + }, + { + "epoch": 0.8971692663200462, + "grad_norm": 5.104004054507385, + "learning_rate": 8.242924299491716e-08, + "loss": 0.4943, + "step": 21742 + }, + { + "epoch": 0.8972105306594041, + "grad_norm": 3.361376498437812, + "learning_rate": 8.23637132323769e-08, + "loss": 0.573, + "step": 21743 + }, + { + "epoch": 0.8972517949987621, + "grad_norm": 2.4120749057034474, + "learning_rate": 8.229820879236704e-08, + "loss": 0.4891, + "step": 21744 + }, + { + "epoch": 0.89729305933812, + "grad_norm": 6.041749777419796, + "learning_rate": 8.22327296760575e-08, + "loss": 0.5429, + "step": 21745 + }, + { + "epoch": 0.8973343236774779, + "grad_norm": 3.3708374469960494, + "learning_rate": 8.21672758846183e-08, + "loss": 0.5516, + "step": 21746 + }, + { + "epoch": 0.8973755880168358, + "grad_norm": 3.2135667479503676, + "learning_rate": 8.210184741921855e-08, + "loss": 0.5056, + "step": 21747 + }, + { + 
"epoch": 0.8974168523561937, + "grad_norm": 3.297508009062025, + "learning_rate": 8.203644428102664e-08, + "loss": 0.5993, + "step": 21748 + }, + { + "epoch": 0.8974581166955518, + "grad_norm": 2.756297206500724, + "learning_rate": 8.197106647121111e-08, + "loss": 0.5223, + "step": 21749 + }, + { + "epoch": 0.8974993810349097, + "grad_norm": 2.7278916785195286, + "learning_rate": 8.190571399093954e-08, + "loss": 0.5088, + "step": 21750 + }, + { + "epoch": 0.8975406453742676, + "grad_norm": 5.598069723560271, + "learning_rate": 8.184038684137984e-08, + "loss": 0.5042, + "step": 21751 + }, + { + "epoch": 0.8975819097136255, + "grad_norm": 2.8672446328637524, + "learning_rate": 8.177508502369824e-08, + "loss": 0.4835, + "step": 21752 + }, + { + "epoch": 0.8976231740529834, + "grad_norm": 3.703194700210891, + "learning_rate": 8.170980853906146e-08, + "loss": 0.5179, + "step": 21753 + }, + { + "epoch": 0.8976644383923413, + "grad_norm": 3.3886967235973073, + "learning_rate": 8.164455738863574e-08, + "loss": 0.4494, + "step": 21754 + }, + { + "epoch": 0.8977057027316993, + "grad_norm": 3.3515095637790826, + "learning_rate": 8.157933157358616e-08, + "loss": 0.4584, + "step": 21755 + }, + { + "epoch": 0.8977469670710572, + "grad_norm": 2.3304553834517736, + "learning_rate": 8.151413109507827e-08, + "loss": 0.4731, + "step": 21756 + }, + { + "epoch": 0.8977882314104151, + "grad_norm": 19.884080346951695, + "learning_rate": 8.144895595427632e-08, + "loss": 0.4827, + "step": 21757 + }, + { + "epoch": 0.897829495749773, + "grad_norm": 2.5863182569847147, + "learning_rate": 8.138380615234469e-08, + "loss": 0.5337, + "step": 21758 + }, + { + "epoch": 0.897870760089131, + "grad_norm": 2.188264997761687, + "learning_rate": 8.131868169044732e-08, + "loss": 0.4579, + "step": 21759 + }, + { + "epoch": 0.8979120244284889, + "grad_norm": 3.1841452477083947, + "learning_rate": 8.125358256974708e-08, + "loss": 0.4755, + "step": 21760 + }, + { + "epoch": 0.8979532887678469, + "grad_norm": 2.538947772754879, + "learning_rate": 8.118850879140705e-08, + "loss": 0.4635, + "step": 21761 + }, + { + "epoch": 0.8979945531072048, + "grad_norm": 1.9217085374771234, + "learning_rate": 8.112346035658963e-08, + "loss": 0.456, + "step": 21762 + }, + { + "epoch": 0.8980358174465627, + "grad_norm": 2.9529047314281147, + "learning_rate": 8.105843726645673e-08, + "loss": 0.4731, + "step": 21763 + }, + { + "epoch": 0.8980770817859206, + "grad_norm": 3.2618157100109544, + "learning_rate": 8.099343952216976e-08, + "loss": 0.5124, + "step": 21764 + }, + { + "epoch": 0.8981183461252785, + "grad_norm": 2.7151028956716683, + "learning_rate": 8.09284671248896e-08, + "loss": 0.5728, + "step": 21765 + }, + { + "epoch": 0.8981596104646364, + "grad_norm": 3.0459712050607157, + "learning_rate": 8.086352007577702e-08, + "loss": 0.5231, + "step": 21766 + }, + { + "epoch": 0.8982008748039944, + "grad_norm": 13.85779514933638, + "learning_rate": 8.079859837599241e-08, + "loss": 0.4571, + "step": 21767 + }, + { + "epoch": 0.8982421391433523, + "grad_norm": 3.318453926564153, + "learning_rate": 8.0733702026695e-08, + "loss": 0.5144, + "step": 21768 + }, + { + "epoch": 0.8982834034827103, + "grad_norm": 3.923016667603699, + "learning_rate": 8.066883102904387e-08, + "loss": 0.4611, + "step": 21769 + }, + { + "epoch": 0.8983246678220682, + "grad_norm": 2.3043178322978237, + "learning_rate": 8.060398538419845e-08, + "loss": 0.5001, + "step": 21770 + }, + { + "epoch": 0.8983659321614261, + "grad_norm": 4.823347167094623, + "learning_rate": 
8.053916509331661e-08, + "loss": 0.5235, + "step": 21771 + }, + { + "epoch": 0.898407196500784, + "grad_norm": 2.6708927708767165, + "learning_rate": 8.047437015755593e-08, + "loss": 0.5138, + "step": 21772 + }, + { + "epoch": 0.898448460840142, + "grad_norm": 4.846271350492965, + "learning_rate": 8.040960057807417e-08, + "loss": 0.5094, + "step": 21773 + }, + { + "epoch": 0.8984897251794999, + "grad_norm": 4.787567082440481, + "learning_rate": 8.034485635602823e-08, + "loss": 0.4875, + "step": 21774 + }, + { + "epoch": 0.8985309895188578, + "grad_norm": 2.5747549106805, + "learning_rate": 8.028013749257484e-08, + "loss": 0.435, + "step": 21775 + }, + { + "epoch": 0.8985722538582157, + "grad_norm": 3.4275806993719993, + "learning_rate": 8.021544398886943e-08, + "loss": 0.4834, + "step": 21776 + }, + { + "epoch": 0.8986135181975736, + "grad_norm": 4.142949464398762, + "learning_rate": 8.015077584606806e-08, + "loss": 0.4969, + "step": 21777 + }, + { + "epoch": 0.8986547825369315, + "grad_norm": 2.3059134627397686, + "learning_rate": 8.00861330653258e-08, + "loss": 0.507, + "step": 21778 + }, + { + "epoch": 0.8986960468762895, + "grad_norm": 8.804774599660147, + "learning_rate": 8.002151564779742e-08, + "loss": 0.5404, + "step": 21779 + }, + { + "epoch": 0.8987373112156475, + "grad_norm": 2.48645553333747, + "learning_rate": 7.99569235946368e-08, + "loss": 0.4339, + "step": 21780 + }, + { + "epoch": 0.8987785755550054, + "grad_norm": 4.544962198014957, + "learning_rate": 7.989235690699787e-08, + "loss": 0.5421, + "step": 21781 + }, + { + "epoch": 0.8988198398943633, + "grad_norm": 3.3338137238902212, + "learning_rate": 7.982781558603419e-08, + "loss": 0.4639, + "step": 21782 + }, + { + "epoch": 0.8988611042337212, + "grad_norm": 2.4448104678975486, + "learning_rate": 7.976329963289853e-08, + "loss": 0.4702, + "step": 21783 + }, + { + "epoch": 0.8989023685730791, + "grad_norm": 2.5808361571398337, + "learning_rate": 7.969880904874327e-08, + "loss": 0.4547, + "step": 21784 + }, + { + "epoch": 0.8989436329124371, + "grad_norm": 3.4532368855271374, + "learning_rate": 7.963434383472001e-08, + "loss": 0.4947, + "step": 21785 + }, + { + "epoch": 0.898984897251795, + "grad_norm": 2.265969107448615, + "learning_rate": 7.956990399198066e-08, + "loss": 0.5204, + "step": 21786 + }, + { + "epoch": 0.8990261615911529, + "grad_norm": 2.2058389982854227, + "learning_rate": 7.95054895216763e-08, + "loss": 0.5362, + "step": 21787 + }, + { + "epoch": 0.8990674259305108, + "grad_norm": 2.8540693162742348, + "learning_rate": 7.94411004249575e-08, + "loss": 0.4746, + "step": 21788 + }, + { + "epoch": 0.8991086902698687, + "grad_norm": 4.687580423407652, + "learning_rate": 7.937673670297418e-08, + "loss": 0.5417, + "step": 21789 + }, + { + "epoch": 0.8991499546092268, + "grad_norm": 2.109880266625305, + "learning_rate": 7.93123983568761e-08, + "loss": 0.5297, + "step": 21790 + }, + { + "epoch": 0.8991912189485847, + "grad_norm": 3.8744755993506264, + "learning_rate": 7.924808538781298e-08, + "loss": 0.5391, + "step": 21791 + }, + { + "epoch": 0.8992324832879426, + "grad_norm": 8.506686021439975, + "learning_rate": 7.918379779693291e-08, + "loss": 0.5434, + "step": 21792 + }, + { + "epoch": 0.8992737476273005, + "grad_norm": 6.252822304872469, + "learning_rate": 7.911953558538465e-08, + "loss": 0.5228, + "step": 21793 + }, + { + "epoch": 0.8993150119666584, + "grad_norm": 2.969411268281515, + "learning_rate": 7.905529875431594e-08, + "loss": 0.5628, + "step": 21794 + }, + { + "epoch": 0.8993562763060163, + 
"grad_norm": 6.4919465319725065, + "learning_rate": 7.899108730487436e-08, + "loss": 0.4697, + "step": 21795 + }, + { + "epoch": 0.8993975406453742, + "grad_norm": 3.0430011847268696, + "learning_rate": 7.892690123820684e-08, + "loss": 0.4903, + "step": 21796 + }, + { + "epoch": 0.8994388049847322, + "grad_norm": 3.527008781194611, + "learning_rate": 7.886274055545961e-08, + "loss": 0.5453, + "step": 21797 + }, + { + "epoch": 0.8994800693240901, + "grad_norm": 4.279825032217089, + "learning_rate": 7.87986052577791e-08, + "loss": 0.5095, + "step": 21798 + }, + { + "epoch": 0.899521333663448, + "grad_norm": 2.416978814576468, + "learning_rate": 7.873449534631072e-08, + "loss": 0.5367, + "step": 21799 + }, + { + "epoch": 0.899562598002806, + "grad_norm": 3.6685173108207807, + "learning_rate": 7.86704108221999e-08, + "loss": 0.5454, + "step": 21800 + }, + { + "epoch": 0.8996038623421639, + "grad_norm": 3.0951654206095034, + "learning_rate": 7.860635168659102e-08, + "loss": 0.5044, + "step": 21801 + }, + { + "epoch": 0.8996451266815219, + "grad_norm": 2.671501168500945, + "learning_rate": 7.854231794062838e-08, + "loss": 0.4407, + "step": 21802 + }, + { + "epoch": 0.8996863910208798, + "grad_norm": 12.120440295456145, + "learning_rate": 7.847830958545604e-08, + "loss": 0.5101, + "step": 21803 + }, + { + "epoch": 0.8997276553602377, + "grad_norm": 2.9437585209713917, + "learning_rate": 7.841432662221726e-08, + "loss": 0.4837, + "step": 21804 + }, + { + "epoch": 0.8997689196995956, + "grad_norm": 2.239371069722357, + "learning_rate": 7.835036905205462e-08, + "loss": 0.4524, + "step": 21805 + }, + { + "epoch": 0.8998101840389535, + "grad_norm": 2.4503450704234266, + "learning_rate": 7.828643687611087e-08, + "loss": 0.5225, + "step": 21806 + }, + { + "epoch": 0.8998514483783114, + "grad_norm": 2.595331839103365, + "learning_rate": 7.82225300955281e-08, + "loss": 0.5145, + "step": 21807 + }, + { + "epoch": 0.8998927127176694, + "grad_norm": 2.3856495273107927, + "learning_rate": 7.815864871144773e-08, + "loss": 0.5119, + "step": 21808 + }, + { + "epoch": 0.8999339770570273, + "grad_norm": 3.1447769424357213, + "learning_rate": 7.809479272501085e-08, + "loss": 0.4819, + "step": 21809 + }, + { + "epoch": 0.8999752413963853, + "grad_norm": 7.755238350213857, + "learning_rate": 7.803096213735772e-08, + "loss": 0.4492, + "step": 21810 + }, + { + "epoch": 0.9000165057357432, + "grad_norm": 5.420518577996305, + "learning_rate": 7.796715694962891e-08, + "loss": 0.5428, + "step": 21811 + }, + { + "epoch": 0.9000577700751011, + "grad_norm": 5.621965559501247, + "learning_rate": 7.790337716296436e-08, + "loss": 0.5227, + "step": 21812 + }, + { + "epoch": 0.900099034414459, + "grad_norm": 16.36271113601891, + "learning_rate": 7.783962277850265e-08, + "loss": 0.4692, + "step": 21813 + }, + { + "epoch": 0.900140298753817, + "grad_norm": 3.8696165786929537, + "learning_rate": 7.777589379738303e-08, + "loss": 0.5461, + "step": 21814 + }, + { + "epoch": 0.9001815630931749, + "grad_norm": 8.071499208070623, + "learning_rate": 7.771219022074394e-08, + "loss": 0.5805, + "step": 21815 + }, + { + "epoch": 0.9002228274325328, + "grad_norm": 3.3695055177448427, + "learning_rate": 7.764851204972328e-08, + "loss": 0.5262, + "step": 21816 + }, + { + "epoch": 0.9002640917718907, + "grad_norm": 2.7781139825199888, + "learning_rate": 7.7584859285458e-08, + "loss": 0.4854, + "step": 21817 + }, + { + "epoch": 0.9003053561112486, + "grad_norm": 6.97481745876817, + "learning_rate": 7.752123192908566e-08, + "loss": 0.4781, + "step": 
21818 + }, + { + "epoch": 0.9003466204506065, + "grad_norm": 24.73986869182236, + "learning_rate": 7.745762998174255e-08, + "loss": 0.5079, + "step": 21819 + }, + { + "epoch": 0.9003878847899646, + "grad_norm": 2.59551815477575, + "learning_rate": 7.739405344456508e-08, + "loss": 0.5415, + "step": 21820 + }, + { + "epoch": 0.9004291491293225, + "grad_norm": 9.660276227869916, + "learning_rate": 7.73305023186885e-08, + "loss": 0.5025, + "step": 21821 + }, + { + "epoch": 0.9004704134686804, + "grad_norm": 2.6089048992095476, + "learning_rate": 7.726697660524806e-08, + "loss": 0.4784, + "step": 21822 + }, + { + "epoch": 0.9005116778080383, + "grad_norm": 3.424110671603097, + "learning_rate": 7.720347630537855e-08, + "loss": 0.5094, + "step": 21823 + }, + { + "epoch": 0.9005529421473962, + "grad_norm": 3.810543312982963, + "learning_rate": 7.714000142021421e-08, + "loss": 0.5228, + "step": 21824 + }, + { + "epoch": 0.9005942064867541, + "grad_norm": 11.83930841341205, + "learning_rate": 7.707655195088913e-08, + "loss": 0.4912, + "step": 21825 + }, + { + "epoch": 0.9006354708261121, + "grad_norm": 2.33601389860054, + "learning_rate": 7.701312789853626e-08, + "loss": 0.5199, + "step": 21826 + }, + { + "epoch": 0.90067673516547, + "grad_norm": 3.3106954316713315, + "learning_rate": 7.69497292642885e-08, + "loss": 0.487, + "step": 21827 + }, + { + "epoch": 0.9007179995048279, + "grad_norm": 2.275352801172296, + "learning_rate": 7.688635604927863e-08, + "loss": 0.51, + "step": 21828 + }, + { + "epoch": 0.9007592638441858, + "grad_norm": 3.0812652213081937, + "learning_rate": 7.682300825463873e-08, + "loss": 0.5162, + "step": 21829 + }, + { + "epoch": 0.9008005281835438, + "grad_norm": 4.892937158538115, + "learning_rate": 7.675968588150006e-08, + "loss": 0.5236, + "step": 21830 + }, + { + "epoch": 0.9008417925229018, + "grad_norm": 4.6091505151371415, + "learning_rate": 7.669638893099357e-08, + "loss": 0.505, + "step": 21831 + }, + { + "epoch": 0.9008830568622597, + "grad_norm": 4.719717841542986, + "learning_rate": 7.66331174042505e-08, + "loss": 0.5081, + "step": 21832 + }, + { + "epoch": 0.9009243212016176, + "grad_norm": 2.6194412975059262, + "learning_rate": 7.656987130240045e-08, + "loss": 0.481, + "step": 21833 + }, + { + "epoch": 0.9009655855409755, + "grad_norm": 2.6316071673209036, + "learning_rate": 7.650665062657337e-08, + "loss": 0.4549, + "step": 21834 + }, + { + "epoch": 0.9010068498803334, + "grad_norm": 4.142761644903491, + "learning_rate": 7.644345537789866e-08, + "loss": 0.5456, + "step": 21835 + }, + { + "epoch": 0.9010481142196913, + "grad_norm": 2.791918630882992, + "learning_rate": 7.638028555750492e-08, + "loss": 0.5192, + "step": 21836 + }, + { + "epoch": 0.9010893785590492, + "grad_norm": 4.808598400112059, + "learning_rate": 7.631714116652078e-08, + "loss": 0.494, + "step": 21837 + }, + { + "epoch": 0.9011306428984072, + "grad_norm": 2.3142138854342633, + "learning_rate": 7.625402220607397e-08, + "loss": 0.5225, + "step": 21838 + }, + { + "epoch": 0.9011719072377651, + "grad_norm": 10.772329581071183, + "learning_rate": 7.619092867729177e-08, + "loss": 0.4843, + "step": 21839 + }, + { + "epoch": 0.901213171577123, + "grad_norm": 4.5377976491116785, + "learning_rate": 7.612786058130144e-08, + "loss": 0.5252, + "step": 21840 + }, + { + "epoch": 0.901254435916481, + "grad_norm": 3.141308994697954, + "learning_rate": 7.606481791922976e-08, + "loss": 0.5348, + "step": 21841 + }, + { + "epoch": 0.9012957002558389, + "grad_norm": 3.6594556681179427, + "learning_rate": 
7.600180069220247e-08, + "loss": 0.4868, + "step": 21842 + }, + { + "epoch": 0.9013369645951969, + "grad_norm": 10.55266345887309, + "learning_rate": 7.59388089013452e-08, + "loss": 0.4712, + "step": 21843 + }, + { + "epoch": 0.9013782289345548, + "grad_norm": 2.2346087971919277, + "learning_rate": 7.587584254778318e-08, + "loss": 0.4811, + "step": 21844 + }, + { + "epoch": 0.9014194932739127, + "grad_norm": 7.293635497038383, + "learning_rate": 7.581290163264154e-08, + "loss": 0.4662, + "step": 21845 + }, + { + "epoch": 0.9014607576132706, + "grad_norm": 2.3783099443463382, + "learning_rate": 7.574998615704404e-08, + "loss": 0.5271, + "step": 21846 + }, + { + "epoch": 0.9015020219526285, + "grad_norm": 2.4674532365652784, + "learning_rate": 7.568709612211478e-08, + "loss": 0.5035, + "step": 21847 + }, + { + "epoch": 0.9015432862919864, + "grad_norm": 5.93059880173298, + "learning_rate": 7.562423152897718e-08, + "loss": 0.5173, + "step": 21848 + }, + { + "epoch": 0.9015845506313444, + "grad_norm": 3.009508499061884, + "learning_rate": 7.556139237875404e-08, + "loss": 0.5416, + "step": 21849 + }, + { + "epoch": 0.9016258149707023, + "grad_norm": 2.513501219316788, + "learning_rate": 7.54985786725676e-08, + "loss": 0.4671, + "step": 21850 + }, + { + "epoch": 0.9016670793100603, + "grad_norm": 2.819901253946181, + "learning_rate": 7.543579041154014e-08, + "loss": 0.4905, + "step": 21851 + }, + { + "epoch": 0.9017083436494182, + "grad_norm": 3.19230091593013, + "learning_rate": 7.537302759679326e-08, + "loss": 0.5837, + "step": 21852 + }, + { + "epoch": 0.9017496079887761, + "grad_norm": 3.568035759727854, + "learning_rate": 7.531029022944808e-08, + "loss": 0.4863, + "step": 21853 + }, + { + "epoch": 0.901790872328134, + "grad_norm": 4.090373542607458, + "learning_rate": 7.524757831062501e-08, + "loss": 0.5446, + "step": 21854 + }, + { + "epoch": 0.901832136667492, + "grad_norm": 30.39126453458429, + "learning_rate": 7.518489184144434e-08, + "loss": 0.4675, + "step": 21855 + }, + { + "epoch": 0.9018734010068499, + "grad_norm": 4.237235081382101, + "learning_rate": 7.512223082302583e-08, + "loss": 0.4615, + "step": 21856 + }, + { + "epoch": 0.9019146653462078, + "grad_norm": 4.176263214794511, + "learning_rate": 7.505959525648892e-08, + "loss": 0.5058, + "step": 21857 + }, + { + "epoch": 0.9019559296855657, + "grad_norm": 2.136508373989596, + "learning_rate": 7.499698514295222e-08, + "loss": 0.4543, + "step": 21858 + }, + { + "epoch": 0.9019971940249236, + "grad_norm": 3.4333081712203355, + "learning_rate": 7.493440048353401e-08, + "loss": 0.5124, + "step": 21859 + }, + { + "epoch": 0.9020384583642815, + "grad_norm": 2.153666178038886, + "learning_rate": 7.487184127935254e-08, + "loss": 0.4649, + "step": 21860 + }, + { + "epoch": 0.9020797227036396, + "grad_norm": 3.040223809225438, + "learning_rate": 7.480930753152526e-08, + "loss": 0.4247, + "step": 21861 + }, + { + "epoch": 0.9021209870429975, + "grad_norm": 64.10976747057768, + "learning_rate": 7.474679924116895e-08, + "loss": 0.5385, + "step": 21862 + }, + { + "epoch": 0.9021622513823554, + "grad_norm": 4.437686067472052, + "learning_rate": 7.468431640940021e-08, + "loss": 0.4975, + "step": 21863 + }, + { + "epoch": 0.9022035157217133, + "grad_norm": 2.5839857895531435, + "learning_rate": 7.462185903733499e-08, + "loss": 0.5178, + "step": 21864 + }, + { + "epoch": 0.9022447800610712, + "grad_norm": 2.5462115740635634, + "learning_rate": 7.45594271260892e-08, + "loss": 0.4894, + "step": 21865 + }, + { + "epoch": 0.9022860444004291, + 
"grad_norm": 7.303655136291775, + "learning_rate": 7.449702067677816e-08, + "loss": 0.5159, + "step": 21866 + }, + { + "epoch": 0.9023273087397871, + "grad_norm": 4.111072154551088, + "learning_rate": 7.443463969051629e-08, + "loss": 0.4734, + "step": 21867 + }, + { + "epoch": 0.902368573079145, + "grad_norm": 3.3678149680695775, + "learning_rate": 7.437228416841785e-08, + "loss": 0.5122, + "step": 21868 + }, + { + "epoch": 0.9024098374185029, + "grad_norm": 3.655711594359864, + "learning_rate": 7.430995411159696e-08, + "loss": 0.4949, + "step": 21869 + }, + { + "epoch": 0.9024511017578608, + "grad_norm": 4.294523878572138, + "learning_rate": 7.424764952116659e-08, + "loss": 0.5156, + "step": 21870 + }, + { + "epoch": 0.9024923660972188, + "grad_norm": 4.907246582167872, + "learning_rate": 7.418537039823997e-08, + "loss": 0.4812, + "step": 21871 + }, + { + "epoch": 0.9025336304365768, + "grad_norm": 4.718579365670197, + "learning_rate": 7.412311674392957e-08, + "loss": 0.5236, + "step": 21872 + }, + { + "epoch": 0.9025748947759347, + "grad_norm": 2.9449721891462155, + "learning_rate": 7.406088855934733e-08, + "loss": 0.481, + "step": 21873 + }, + { + "epoch": 0.9026161591152926, + "grad_norm": 2.7290107958897525, + "learning_rate": 7.399868584560471e-08, + "loss": 0.5447, + "step": 21874 + }, + { + "epoch": 0.9026574234546505, + "grad_norm": 3.706863348497761, + "learning_rate": 7.393650860381295e-08, + "loss": 0.524, + "step": 21875 + }, + { + "epoch": 0.9026986877940084, + "grad_norm": 2.6593568402311782, + "learning_rate": 7.387435683508237e-08, + "loss": 0.4956, + "step": 21876 + }, + { + "epoch": 0.9027399521333663, + "grad_norm": 2.6598680121019505, + "learning_rate": 7.381223054052355e-08, + "loss": 0.4267, + "step": 21877 + }, + { + "epoch": 0.9027812164727242, + "grad_norm": 4.049527233595589, + "learning_rate": 7.375012972124628e-08, + "loss": 0.561, + "step": 21878 + }, + { + "epoch": 0.9028224808120822, + "grad_norm": 3.3094099794568117, + "learning_rate": 7.368805437835935e-08, + "loss": 0.5483, + "step": 21879 + }, + { + "epoch": 0.9028637451514401, + "grad_norm": 5.815454576210371, + "learning_rate": 7.362600451297185e-08, + "loss": 0.5126, + "step": 21880 + }, + { + "epoch": 0.9029050094907981, + "grad_norm": 4.532836088035384, + "learning_rate": 7.356398012619208e-08, + "loss": 0.4835, + "step": 21881 + }, + { + "epoch": 0.902946273830156, + "grad_norm": 2.453474557250552, + "learning_rate": 7.350198121912833e-08, + "loss": 0.5384, + "step": 21882 + }, + { + "epoch": 0.9029875381695139, + "grad_norm": 2.8770342763420853, + "learning_rate": 7.34400077928875e-08, + "loss": 0.5424, + "step": 21883 + }, + { + "epoch": 0.9030288025088719, + "grad_norm": 40.351668581981535, + "learning_rate": 7.337805984857693e-08, + "loss": 0.5091, + "step": 21884 + }, + { + "epoch": 0.9030700668482298, + "grad_norm": 4.390392566034874, + "learning_rate": 7.331613738730286e-08, + "loss": 0.4774, + "step": 21885 + }, + { + "epoch": 0.9031113311875877, + "grad_norm": 2.2556434876768763, + "learning_rate": 7.325424041017192e-08, + "loss": 0.4542, + "step": 21886 + }, + { + "epoch": 0.9031525955269456, + "grad_norm": 3.885164125343564, + "learning_rate": 7.319236891828923e-08, + "loss": 0.5132, + "step": 21887 + }, + { + "epoch": 0.9031938598663035, + "grad_norm": 3.748778134703192, + "learning_rate": 7.31305229127604e-08, + "loss": 0.553, + "step": 21888 + }, + { + "epoch": 0.9032351242056614, + "grad_norm": 4.308766638392988, + "learning_rate": 7.306870239468971e-08, + "loss": 0.4922, + "step": 
21889 + }, + { + "epoch": 0.9032763885450193, + "grad_norm": 4.422779556692505, + "learning_rate": 7.300690736518178e-08, + "loss": 0.5474, + "step": 21890 + }, + { + "epoch": 0.9033176528843774, + "grad_norm": 6.450806619510485, + "learning_rate": 7.294513782534007e-08, + "loss": 0.5228, + "step": 21891 + }, + { + "epoch": 0.9033589172237353, + "grad_norm": 2.54696504690509, + "learning_rate": 7.288339377626818e-08, + "loss": 0.4613, + "step": 21892 + }, + { + "epoch": 0.9034001815630932, + "grad_norm": 5.357930289979994, + "learning_rate": 7.28216752190689e-08, + "loss": 0.4633, + "step": 21893 + }, + { + "epoch": 0.9034414459024511, + "grad_norm": 3.825408866366832, + "learning_rate": 7.275998215484503e-08, + "loss": 0.4992, + "step": 21894 + }, + { + "epoch": 0.903482710241809, + "grad_norm": 4.174333269796597, + "learning_rate": 7.2698314584698e-08, + "loss": 0.489, + "step": 21895 + }, + { + "epoch": 0.903523974581167, + "grad_norm": 5.615633300592573, + "learning_rate": 7.263667250972977e-08, + "loss": 0.4802, + "step": 21896 + }, + { + "epoch": 0.9035652389205249, + "grad_norm": 2.7553961766120283, + "learning_rate": 7.257505593104113e-08, + "loss": 0.5085, + "step": 21897 + }, + { + "epoch": 0.9036065032598828, + "grad_norm": 2.3902392279571583, + "learning_rate": 7.251346484973321e-08, + "loss": 0.4947, + "step": 21898 + }, + { + "epoch": 0.9036477675992407, + "grad_norm": 2.9483524155602936, + "learning_rate": 7.24518992669056e-08, + "loss": 0.479, + "step": 21899 + }, + { + "epoch": 0.9036890319385986, + "grad_norm": 6.191633047010771, + "learning_rate": 7.239035918365811e-08, + "loss": 0.5056, + "step": 21900 + }, + { + "epoch": 0.9037302962779565, + "grad_norm": 3.7161626604531546, + "learning_rate": 7.232884460109051e-08, + "loss": 0.5058, + "step": 21901 + }, + { + "epoch": 0.9037715606173146, + "grad_norm": 3.9316063048677377, + "learning_rate": 7.226735552030094e-08, + "loss": 0.5389, + "step": 21902 + }, + { + "epoch": 0.9038128249566725, + "grad_norm": 10.45702564971059, + "learning_rate": 7.220589194238819e-08, + "loss": 0.5425, + "step": 21903 + }, + { + "epoch": 0.9038540892960304, + "grad_norm": 18.18831157915057, + "learning_rate": 7.214445386844986e-08, + "loss": 0.5381, + "step": 21904 + }, + { + "epoch": 0.9038953536353883, + "grad_norm": 2.66148860843233, + "learning_rate": 7.208304129958359e-08, + "loss": 0.499, + "step": 21905 + }, + { + "epoch": 0.9039366179747462, + "grad_norm": 4.799485457000894, + "learning_rate": 7.202165423688617e-08, + "loss": 0.5242, + "step": 21906 + }, + { + "epoch": 0.9039778823141041, + "grad_norm": 7.864746209589831, + "learning_rate": 7.196029268145454e-08, + "loss": 0.5918, + "step": 21907 + }, + { + "epoch": 0.9040191466534621, + "grad_norm": 2.1129530388717126, + "learning_rate": 7.189895663438433e-08, + "loss": 0.4892, + "step": 21908 + }, + { + "epoch": 0.90406041099282, + "grad_norm": 25.40341146760157, + "learning_rate": 7.183764609677134e-08, + "loss": 0.5078, + "step": 21909 + }, + { + "epoch": 0.9041016753321779, + "grad_norm": 2.751962320744161, + "learning_rate": 7.177636106971086e-08, + "loss": 0.5183, + "step": 21910 + }, + { + "epoch": 0.9041429396715358, + "grad_norm": 6.884354186718176, + "learning_rate": 7.171510155429717e-08, + "loss": 0.521, + "step": 21911 + }, + { + "epoch": 0.9041842040108938, + "grad_norm": 3.423835431679687, + "learning_rate": 7.16538675516249e-08, + "loss": 0.4635, + "step": 21912 + }, + { + "epoch": 0.9042254683502517, + "grad_norm": 3.3549972003225275, + "learning_rate": 
7.159265906278767e-08, + "loss": 0.4678, + "step": 21913 + }, + { + "epoch": 0.9042667326896097, + "grad_norm": 3.519066431974903, + "learning_rate": 7.153147608887911e-08, + "loss": 0.5437, + "step": 21914 + }, + { + "epoch": 0.9043079970289676, + "grad_norm": 10.277609426522256, + "learning_rate": 7.147031863099185e-08, + "loss": 0.4637, + "step": 21915 + }, + { + "epoch": 0.9043492613683255, + "grad_norm": 2.99061480458222, + "learning_rate": 7.1409186690218e-08, + "loss": 0.4718, + "step": 21916 + }, + { + "epoch": 0.9043905257076834, + "grad_norm": 2.56959226027921, + "learning_rate": 7.134808026765005e-08, + "loss": 0.5091, + "step": 21917 + }, + { + "epoch": 0.9044317900470413, + "grad_norm": 3.548578493869902, + "learning_rate": 7.128699936437927e-08, + "loss": 0.5227, + "step": 21918 + }, + { + "epoch": 0.9044730543863992, + "grad_norm": 2.7259851964391832, + "learning_rate": 7.122594398149695e-08, + "loss": 0.4579, + "step": 21919 + }, + { + "epoch": 0.9045143187257572, + "grad_norm": 2.078770352086228, + "learning_rate": 7.116491412009341e-08, + "loss": 0.511, + "step": 21920 + }, + { + "epoch": 0.9045555830651151, + "grad_norm": 2.9469441170556827, + "learning_rate": 7.110390978125875e-08, + "loss": 0.5093, + "step": 21921 + }, + { + "epoch": 0.9045968474044731, + "grad_norm": 2.10895713239245, + "learning_rate": 7.104293096608295e-08, + "loss": 0.4898, + "step": 21922 + }, + { + "epoch": 0.904638111743831, + "grad_norm": 4.379152642380062, + "learning_rate": 7.098197767565512e-08, + "loss": 0.5244, + "step": 21923 + }, + { + "epoch": 0.9046793760831889, + "grad_norm": 5.773946810184511, + "learning_rate": 7.092104991106391e-08, + "loss": 0.5944, + "step": 21924 + }, + { + "epoch": 0.9047206404225469, + "grad_norm": 4.993662077364554, + "learning_rate": 7.086014767339793e-08, + "loss": 0.4964, + "step": 21925 + }, + { + "epoch": 0.9047619047619048, + "grad_norm": 3.7188073674831377, + "learning_rate": 7.079927096374466e-08, + "loss": 0.5052, + "step": 21926 + }, + { + "epoch": 0.9048031691012627, + "grad_norm": 5.084457334526719, + "learning_rate": 7.073841978319223e-08, + "loss": 0.5332, + "step": 21927 + }, + { + "epoch": 0.9048444334406206, + "grad_norm": 10.73380484128524, + "learning_rate": 7.06775941328266e-08, + "loss": 0.5051, + "step": 21928 + }, + { + "epoch": 0.9048856977799785, + "grad_norm": 6.617212425155409, + "learning_rate": 7.061679401373489e-08, + "loss": 0.489, + "step": 21929 + }, + { + "epoch": 0.9049269621193364, + "grad_norm": 3.3502891248917686, + "learning_rate": 7.05560194270029e-08, + "loss": 0.4827, + "step": 21930 + }, + { + "epoch": 0.9049682264586943, + "grad_norm": 1.8752000612431923, + "learning_rate": 7.049527037371661e-08, + "loss": 0.4801, + "step": 21931 + }, + { + "epoch": 0.9050094907980524, + "grad_norm": 3.080342710843894, + "learning_rate": 7.043454685496048e-08, + "loss": 0.5011, + "step": 21932 + }, + { + "epoch": 0.9050507551374103, + "grad_norm": 3.631189872227645, + "learning_rate": 7.03738488718198e-08, + "loss": 0.543, + "step": 21933 + }, + { + "epoch": 0.9050920194767682, + "grad_norm": 2.0551314565413983, + "learning_rate": 7.031317642537838e-08, + "loss": 0.5069, + "step": 21934 + }, + { + "epoch": 0.9051332838161261, + "grad_norm": 5.116438379853537, + "learning_rate": 7.025252951672051e-08, + "loss": 0.5538, + "step": 21935 + }, + { + "epoch": 0.905174548155484, + "grad_norm": 3.0198349301353886, + "learning_rate": 7.019190814692883e-08, + "loss": 0.5045, + "step": 21936 + }, + { + "epoch": 0.905215812494842, + 
"grad_norm": 2.556521681047164, + "learning_rate": 7.013131231708647e-08, + "loss": 0.5316, + "step": 21937 + }, + { + "epoch": 0.9052570768341999, + "grad_norm": 3.9491428528069417, + "learning_rate": 7.00707420282759e-08, + "loss": 0.4879, + "step": 21938 + }, + { + "epoch": 0.9052983411735578, + "grad_norm": 5.340231565421461, + "learning_rate": 7.001019728157926e-08, + "loss": 0.4806, + "step": 21939 + }, + { + "epoch": 0.9053396055129157, + "grad_norm": 3.832757877634436, + "learning_rate": 6.994967807807784e-08, + "loss": 0.4985, + "step": 21940 + }, + { + "epoch": 0.9053808698522736, + "grad_norm": 5.622518341078946, + "learning_rate": 6.988918441885228e-08, + "loss": 0.4683, + "step": 21941 + }, + { + "epoch": 0.9054221341916316, + "grad_norm": 3.006846890199357, + "learning_rate": 6.982871630498355e-08, + "loss": 0.5018, + "step": 21942 + }, + { + "epoch": 0.9054633985309896, + "grad_norm": 2.891737103754431, + "learning_rate": 6.976827373755163e-08, + "loss": 0.5143, + "step": 21943 + }, + { + "epoch": 0.9055046628703475, + "grad_norm": 2.367269465578383, + "learning_rate": 6.970785671763646e-08, + "loss": 0.5079, + "step": 21944 + }, + { + "epoch": 0.9055459272097054, + "grad_norm": 2.517800606137935, + "learning_rate": 6.964746524631687e-08, + "loss": 0.5205, + "step": 21945 + }, + { + "epoch": 0.9055871915490633, + "grad_norm": 7.660112010024137, + "learning_rate": 6.958709932467166e-08, + "loss": 0.5373, + "step": 21946 + }, + { + "epoch": 0.9056284558884212, + "grad_norm": 1.9659667678299406, + "learning_rate": 6.95267589537793e-08, + "loss": 0.4406, + "step": 21947 + }, + { + "epoch": 0.9056697202277791, + "grad_norm": 5.463875267643832, + "learning_rate": 6.946644413471759e-08, + "loss": 0.5306, + "step": 21948 + }, + { + "epoch": 0.9057109845671371, + "grad_norm": 4.045226561985533, + "learning_rate": 6.940615486856366e-08, + "loss": 0.5554, + "step": 21949 + }, + { + "epoch": 0.905752248906495, + "grad_norm": 14.184270995875464, + "learning_rate": 6.934589115639467e-08, + "loss": 0.5378, + "step": 21950 + }, + { + "epoch": 0.9057935132458529, + "grad_norm": 2.1762038254587113, + "learning_rate": 6.928565299928708e-08, + "loss": 0.4952, + "step": 21951 + }, + { + "epoch": 0.9058347775852109, + "grad_norm": 2.5851154808129144, + "learning_rate": 6.922544039831668e-08, + "loss": 0.5164, + "step": 21952 + }, + { + "epoch": 0.9058760419245688, + "grad_norm": 3.5644515870539295, + "learning_rate": 6.916525335455931e-08, + "loss": 0.51, + "step": 21953 + }, + { + "epoch": 0.9059173062639267, + "grad_norm": 2.4437316611996116, + "learning_rate": 6.910509186908992e-08, + "loss": 0.5686, + "step": 21954 + }, + { + "epoch": 0.9059585706032847, + "grad_norm": 3.400806894267647, + "learning_rate": 6.904495594298316e-08, + "loss": 0.4518, + "step": 21955 + }, + { + "epoch": 0.9059998349426426, + "grad_norm": 3.141156819935062, + "learning_rate": 6.898484557731316e-08, + "loss": 0.5426, + "step": 21956 + }, + { + "epoch": 0.9060410992820005, + "grad_norm": 2.054440299412089, + "learning_rate": 6.892476077315358e-08, + "loss": 0.5224, + "step": 21957 + }, + { + "epoch": 0.9060823636213584, + "grad_norm": 3.502008731640663, + "learning_rate": 6.88647015315777e-08, + "loss": 0.5501, + "step": 21958 + }, + { + "epoch": 0.9061236279607163, + "grad_norm": 2.1355623454532777, + "learning_rate": 6.880466785365851e-08, + "loss": 0.5234, + "step": 21959 + }, + { + "epoch": 0.9061648923000742, + "grad_norm": 3.585387240692446, + "learning_rate": 6.87446597404685e-08, + "loss": 0.5813, + "step": 
21960 + }, + { + "epoch": 0.9062061566394322, + "grad_norm": 3.5465929312189473, + "learning_rate": 6.868467719307897e-08, + "loss": 0.5151, + "step": 21961 + }, + { + "epoch": 0.9062474209787901, + "grad_norm": 2.3013800667645623, + "learning_rate": 6.862472021256172e-08, + "loss": 0.4745, + "step": 21962 + }, + { + "epoch": 0.9062886853181481, + "grad_norm": 3.0274765824304057, + "learning_rate": 6.85647887999879e-08, + "loss": 0.5129, + "step": 21963 + }, + { + "epoch": 0.906329949657506, + "grad_norm": 2.372480367295668, + "learning_rate": 6.850488295642782e-08, + "loss": 0.4978, + "step": 21964 + }, + { + "epoch": 0.9063712139968639, + "grad_norm": 3.013473080240621, + "learning_rate": 6.844500268295145e-08, + "loss": 0.5038, + "step": 21965 + }, + { + "epoch": 0.9064124783362218, + "grad_norm": 2.585685976921729, + "learning_rate": 6.838514798062862e-08, + "loss": 0.4806, + "step": 21966 + }, + { + "epoch": 0.9064537426755798, + "grad_norm": 4.308541283801637, + "learning_rate": 6.832531885052861e-08, + "loss": 0.5214, + "step": 21967 + }, + { + "epoch": 0.9064950070149377, + "grad_norm": 3.692921332362944, + "learning_rate": 6.826551529371978e-08, + "loss": 0.5221, + "step": 21968 + }, + { + "epoch": 0.9065362713542956, + "grad_norm": 3.697589795227597, + "learning_rate": 6.820573731127039e-08, + "loss": 0.5534, + "step": 21969 + }, + { + "epoch": 0.9065775356936535, + "grad_norm": 3.6908300864781163, + "learning_rate": 6.814598490424828e-08, + "loss": 0.5147, + "step": 21970 + }, + { + "epoch": 0.9066188000330114, + "grad_norm": 2.4672606535364134, + "learning_rate": 6.808625807372093e-08, + "loss": 0.5151, + "step": 21971 + }, + { + "epoch": 0.9066600643723693, + "grad_norm": 2.8867669152426347, + "learning_rate": 6.802655682075515e-08, + "loss": 0.5171, + "step": 21972 + }, + { + "epoch": 0.9067013287117274, + "grad_norm": 2.848445859482924, + "learning_rate": 6.796688114641725e-08, + "loss": 0.4945, + "step": 21973 + }, + { + "epoch": 0.9067425930510853, + "grad_norm": 2.4038287000432756, + "learning_rate": 6.790723105177321e-08, + "loss": 0.5113, + "step": 21974 + }, + { + "epoch": 0.9067838573904432, + "grad_norm": 5.7697419982688665, + "learning_rate": 6.784760653788851e-08, + "loss": 0.5511, + "step": 21975 + }, + { + "epoch": 0.9068251217298011, + "grad_norm": 5.102183070623261, + "learning_rate": 6.778800760582848e-08, + "loss": 0.5597, + "step": 21976 + }, + { + "epoch": 0.906866386069159, + "grad_norm": 2.4545614180000017, + "learning_rate": 6.772843425665742e-08, + "loss": 0.5187, + "step": 21977 + }, + { + "epoch": 0.906907650408517, + "grad_norm": 3.781077912275672, + "learning_rate": 6.766888649143932e-08, + "loss": 0.5558, + "step": 21978 + }, + { + "epoch": 0.9069489147478749, + "grad_norm": 1.9154152449351516, + "learning_rate": 6.760936431123816e-08, + "loss": 0.4631, + "step": 21979 + }, + { + "epoch": 0.9069901790872328, + "grad_norm": 5.790823546161019, + "learning_rate": 6.754986771711724e-08, + "loss": 0.5119, + "step": 21980 + }, + { + "epoch": 0.9070314434265907, + "grad_norm": 3.639668810509753, + "learning_rate": 6.749039671013907e-08, + "loss": 0.5741, + "step": 21981 + }, + { + "epoch": 0.9070727077659486, + "grad_norm": 2.4568762234391452, + "learning_rate": 6.743095129136578e-08, + "loss": 0.4708, + "step": 21982 + }, + { + "epoch": 0.9071139721053066, + "grad_norm": 2.38906734778391, + "learning_rate": 6.737153146185953e-08, + "loss": 0.5169, + "step": 21983 + }, + { + "epoch": 0.9071552364446646, + "grad_norm": 3.1394315523392797, + 
"learning_rate": 6.731213722268165e-08, + "loss": 0.5484, + "step": 21984 + }, + { + "epoch": 0.9071965007840225, + "grad_norm": 2.4676963622749004, + "learning_rate": 6.72527685748931e-08, + "loss": 0.5769, + "step": 21985 + }, + { + "epoch": 0.9072377651233804, + "grad_norm": 2.7405978192112883, + "learning_rate": 6.719342551955421e-08, + "loss": 0.5097, + "step": 21986 + }, + { + "epoch": 0.9072790294627383, + "grad_norm": 4.401964167649849, + "learning_rate": 6.713410805772496e-08, + "loss": 0.5524, + "step": 21987 + }, + { + "epoch": 0.9073202938020962, + "grad_norm": 3.7563554945489814, + "learning_rate": 6.707481619046535e-08, + "loss": 0.4774, + "step": 21988 + }, + { + "epoch": 0.9073615581414541, + "grad_norm": 11.818445041482198, + "learning_rate": 6.701554991883385e-08, + "loss": 0.4858, + "step": 21989 + }, + { + "epoch": 0.907402822480812, + "grad_norm": 2.2723413239515056, + "learning_rate": 6.695630924388946e-08, + "loss": 0.4795, + "step": 21990 + }, + { + "epoch": 0.90744408682017, + "grad_norm": 3.549995748930045, + "learning_rate": 6.689709416669032e-08, + "loss": 0.4681, + "step": 21991 + }, + { + "epoch": 0.9074853511595279, + "grad_norm": 2.3792464596205223, + "learning_rate": 6.683790468829426e-08, + "loss": 0.5606, + "step": 21992 + }, + { + "epoch": 0.9075266154988859, + "grad_norm": 3.7733922014875656, + "learning_rate": 6.67787408097586e-08, + "loss": 0.5067, + "step": 21993 + }, + { + "epoch": 0.9075678798382438, + "grad_norm": 2.6617639574726457, + "learning_rate": 6.671960253213965e-08, + "loss": 0.536, + "step": 21994 + }, + { + "epoch": 0.9076091441776017, + "grad_norm": 3.8933471749055983, + "learning_rate": 6.666048985649408e-08, + "loss": 0.5413, + "step": 21995 + }, + { + "epoch": 0.9076504085169597, + "grad_norm": 2.0835651459633175, + "learning_rate": 6.660140278387806e-08, + "loss": 0.4894, + "step": 21996 + }, + { + "epoch": 0.9076916728563176, + "grad_norm": 2.0119446749972965, + "learning_rate": 6.654234131534687e-08, + "loss": 0.5178, + "step": 21997 + }, + { + "epoch": 0.9077329371956755, + "grad_norm": 2.6391667455317354, + "learning_rate": 6.648330545195519e-08, + "loss": 0.5401, + "step": 21998 + }, + { + "epoch": 0.9077742015350334, + "grad_norm": 2.51503163741982, + "learning_rate": 6.642429519475785e-08, + "loss": 0.4833, + "step": 21999 + }, + { + "epoch": 0.9078154658743913, + "grad_norm": 3.2637479374600464, + "learning_rate": 6.636531054480882e-08, + "loss": 0.4743, + "step": 22000 + }, + { + "epoch": 0.9078567302137492, + "grad_norm": 2.1738419829705022, + "learning_rate": 6.630635150316195e-08, + "loss": 0.4208, + "step": 22001 + }, + { + "epoch": 0.9078979945531072, + "grad_norm": 3.0089069837379725, + "learning_rate": 6.624741807086988e-08, + "loss": 0.4742, + "step": 22002 + }, + { + "epoch": 0.9079392588924652, + "grad_norm": 38.578175633083, + "learning_rate": 6.618851024898575e-08, + "loss": 0.5096, + "step": 22003 + }, + { + "epoch": 0.9079805232318231, + "grad_norm": 4.6498180986580016, + "learning_rate": 6.612962803856177e-08, + "loss": 0.5074, + "step": 22004 + }, + { + "epoch": 0.908021787571181, + "grad_norm": 3.8205284235745047, + "learning_rate": 6.607077144064971e-08, + "loss": 0.5405, + "step": 22005 + }, + { + "epoch": 0.9080630519105389, + "grad_norm": 4.100355736792446, + "learning_rate": 6.601194045630077e-08, + "loss": 0.5679, + "step": 22006 + }, + { + "epoch": 0.9081043162498968, + "grad_norm": 2.3797512673943646, + "learning_rate": 6.595313508656576e-08, + "loss": 0.4873, + "step": 22007 + }, + { + "epoch": 
0.9081455805892548, + "grad_norm": 3.6837811606192736, + "learning_rate": 6.589435533249516e-08, + "loss": 0.5579, + "step": 22008 + }, + { + "epoch": 0.9081868449286127, + "grad_norm": 2.2976807361635285, + "learning_rate": 6.583560119513898e-08, + "loss": 0.4459, + "step": 22009 + }, + { + "epoch": 0.9082281092679706, + "grad_norm": 2.8123183179782125, + "learning_rate": 6.577687267554671e-08, + "loss": 0.5037, + "step": 22010 + }, + { + "epoch": 0.9082693736073285, + "grad_norm": 2.1012791422362014, + "learning_rate": 6.571816977476735e-08, + "loss": 0.489, + "step": 22011 + }, + { + "epoch": 0.9083106379466864, + "grad_norm": 3.135614390526715, + "learning_rate": 6.56594924938494e-08, + "loss": 0.5524, + "step": 22012 + }, + { + "epoch": 0.9083519022860445, + "grad_norm": 2.7987969346306745, + "learning_rate": 6.560084083384149e-08, + "loss": 0.4975, + "step": 22013 + }, + { + "epoch": 0.9083931666254024, + "grad_norm": 2.4238645697575887, + "learning_rate": 6.554221479579047e-08, + "loss": 0.4941, + "step": 22014 + }, + { + "epoch": 0.9084344309647603, + "grad_norm": 5.517898474460764, + "learning_rate": 6.548361438074402e-08, + "loss": 0.5058, + "step": 22015 + }, + { + "epoch": 0.9084756953041182, + "grad_norm": 4.008342897492644, + "learning_rate": 6.542503958974877e-08, + "loss": 0.5311, + "step": 22016 + }, + { + "epoch": 0.9085169596434761, + "grad_norm": 2.1594886703778924, + "learning_rate": 6.53664904238514e-08, + "loss": 0.505, + "step": 22017 + }, + { + "epoch": 0.908558223982834, + "grad_norm": 2.694107376411706, + "learning_rate": 6.530796688409723e-08, + "loss": 0.6345, + "step": 22018 + }, + { + "epoch": 0.908599488322192, + "grad_norm": 6.749630529299021, + "learning_rate": 6.524946897153195e-08, + "loss": 0.477, + "step": 22019 + }, + { + "epoch": 0.9086407526615499, + "grad_norm": 2.958137679211878, + "learning_rate": 6.519099668720002e-08, + "loss": 0.5436, + "step": 22020 + }, + { + "epoch": 0.9086820170009078, + "grad_norm": 2.813183157104387, + "learning_rate": 6.513255003214646e-08, + "loss": 0.5692, + "step": 22021 + }, + { + "epoch": 0.9087232813402657, + "grad_norm": 2.6557261178940963, + "learning_rate": 6.507412900741511e-08, + "loss": 0.4899, + "step": 22022 + }, + { + "epoch": 0.9087645456796236, + "grad_norm": 2.2458403774906475, + "learning_rate": 6.501573361404927e-08, + "loss": 0.4592, + "step": 22023 + }, + { + "epoch": 0.9088058100189816, + "grad_norm": 3.236598017816895, + "learning_rate": 6.495736385309214e-08, + "loss": 0.5129, + "step": 22024 + }, + { + "epoch": 0.9088470743583396, + "grad_norm": 10.840911017627887, + "learning_rate": 6.489901972558671e-08, + "loss": 0.4971, + "step": 22025 + }, + { + "epoch": 0.9088883386976975, + "grad_norm": 6.474044372490642, + "learning_rate": 6.484070123257463e-08, + "loss": 0.4891, + "step": 22026 + }, + { + "epoch": 0.9089296030370554, + "grad_norm": 2.151910061506471, + "learning_rate": 6.478240837509791e-08, + "loss": 0.5413, + "step": 22027 + }, + { + "epoch": 0.9089708673764133, + "grad_norm": 2.534435505491271, + "learning_rate": 6.472414115419773e-08, + "loss": 0.4454, + "step": 22028 + }, + { + "epoch": 0.9090121317157712, + "grad_norm": 3.912783210170096, + "learning_rate": 6.466589957091506e-08, + "loss": 0.5313, + "step": 22029 + }, + { + "epoch": 0.9090533960551291, + "grad_norm": 7.789481036583818, + "learning_rate": 6.460768362628978e-08, + "loss": 0.4891, + "step": 22030 + }, + { + "epoch": 0.909094660394487, + "grad_norm": 3.2470663644680773, + "learning_rate": 6.454949332136218e-08, + 
"loss": 0.4827, + "step": 22031 + }, + { + "epoch": 0.909135924733845, + "grad_norm": 4.024820364475692, + "learning_rate": 6.449132865717177e-08, + "loss": 0.4989, + "step": 22032 + }, + { + "epoch": 0.9091771890732029, + "grad_norm": 3.350524009645232, + "learning_rate": 6.443318963475726e-08, + "loss": 0.479, + "step": 22033 + }, + { + "epoch": 0.9092184534125609, + "grad_norm": 3.1585408907398236, + "learning_rate": 6.437507625515726e-08, + "loss": 0.4542, + "step": 22034 + }, + { + "epoch": 0.9092597177519188, + "grad_norm": 5.568796411348312, + "learning_rate": 6.431698851940949e-08, + "loss": 0.5004, + "step": 22035 + }, + { + "epoch": 0.9093009820912767, + "grad_norm": 2.4859866665887997, + "learning_rate": 6.425892642855207e-08, + "loss": 0.5245, + "step": 22036 + }, + { + "epoch": 0.9093422464306347, + "grad_norm": 6.2983793343674, + "learning_rate": 6.420088998362172e-08, + "loss": 0.5256, + "step": 22037 + }, + { + "epoch": 0.9093835107699926, + "grad_norm": 2.6883660700434517, + "learning_rate": 6.414287918565558e-08, + "loss": 0.541, + "step": 22038 + }, + { + "epoch": 0.9094247751093505, + "grad_norm": 3.8103451584824413, + "learning_rate": 6.408489403568934e-08, + "loss": 0.5454, + "step": 22039 + }, + { + "epoch": 0.9094660394487084, + "grad_norm": 2.3240182423022464, + "learning_rate": 6.4026934534759e-08, + "loss": 0.5037, + "step": 22040 + }, + { + "epoch": 0.9095073037880663, + "grad_norm": 3.0884896157695385, + "learning_rate": 6.396900068389988e-08, + "loss": 0.5559, + "step": 22041 + }, + { + "epoch": 0.9095485681274242, + "grad_norm": 14.17129452532057, + "learning_rate": 6.391109248414701e-08, + "loss": 0.5102, + "step": 22042 + }, + { + "epoch": 0.9095898324667822, + "grad_norm": 3.336796846513304, + "learning_rate": 6.385320993653438e-08, + "loss": 0.4614, + "step": 22043 + }, + { + "epoch": 0.9096310968061402, + "grad_norm": 2.448937677150983, + "learning_rate": 6.3795353042096e-08, + "loss": 0.4604, + "step": 22044 + }, + { + "epoch": 0.9096723611454981, + "grad_norm": 4.979956023151884, + "learning_rate": 6.373752180186554e-08, + "loss": 0.4812, + "step": 22045 + }, + { + "epoch": 0.909713625484856, + "grad_norm": 1.9122958494651574, + "learning_rate": 6.367971621687635e-08, + "loss": 0.5581, + "step": 22046 + }, + { + "epoch": 0.9097548898242139, + "grad_norm": 37.16162116570125, + "learning_rate": 6.36219362881601e-08, + "loss": 0.5195, + "step": 22047 + }, + { + "epoch": 0.9097961541635718, + "grad_norm": 2.905187097461327, + "learning_rate": 6.356418201674947e-08, + "loss": 0.5118, + "step": 22048 + }, + { + "epoch": 0.9098374185029298, + "grad_norm": 5.570733353369994, + "learning_rate": 6.350645340367577e-08, + "loss": 0.5278, + "step": 22049 + }, + { + "epoch": 0.9098786828422877, + "grad_norm": 4.469850185715029, + "learning_rate": 6.344875044997072e-08, + "loss": 0.4938, + "step": 22050 + }, + { + "epoch": 0.9099199471816456, + "grad_norm": 3.0529913064205045, + "learning_rate": 6.339107315666432e-08, + "loss": 0.5288, + "step": 22051 + }, + { + "epoch": 0.9099612115210035, + "grad_norm": 3.9117992734747045, + "learning_rate": 6.333342152478738e-08, + "loss": 0.4914, + "step": 22052 + }, + { + "epoch": 0.9100024758603614, + "grad_norm": 5.00714280749012, + "learning_rate": 6.327579555536945e-08, + "loss": 0.5337, + "step": 22053 + }, + { + "epoch": 0.9100437401997195, + "grad_norm": 5.406941323707365, + "learning_rate": 6.321819524944017e-08, + "loss": 0.5317, + "step": 22054 + }, + { + "epoch": 0.9100850045390774, + "grad_norm": 8.436269464681692, + 
"learning_rate": 6.316062060802791e-08, + "loss": 0.5045, + "step": 22055 + }, + { + "epoch": 0.9101262688784353, + "grad_norm": 3.5950833336980415, + "learning_rate": 6.31030716321615e-08, + "loss": 0.4955, + "step": 22056 + }, + { + "epoch": 0.9101675332177932, + "grad_norm": 2.9363786768118154, + "learning_rate": 6.304554832286863e-08, + "loss": 0.4705, + "step": 22057 + }, + { + "epoch": 0.9102087975571511, + "grad_norm": 5.369334760389145, + "learning_rate": 6.29880506811773e-08, + "loss": 0.4636, + "step": 22058 + }, + { + "epoch": 0.910250061896509, + "grad_norm": 2.4983643519060728, + "learning_rate": 6.29305787081142e-08, + "loss": 0.5233, + "step": 22059 + }, + { + "epoch": 0.910291326235867, + "grad_norm": 16.28484925745031, + "learning_rate": 6.287313240470583e-08, + "loss": 0.5493, + "step": 22060 + }, + { + "epoch": 0.9103325905752249, + "grad_norm": 2.8071297776498025, + "learning_rate": 6.281571177197837e-08, + "loss": 0.504, + "step": 22061 + }, + { + "epoch": 0.9103738549145828, + "grad_norm": 2.5647293908675213, + "learning_rate": 6.275831681095768e-08, + "loss": 0.4736, + "step": 22062 + }, + { + "epoch": 0.9104151192539407, + "grad_norm": 3.414890502776913, + "learning_rate": 6.270094752266891e-08, + "loss": 0.4979, + "step": 22063 + }, + { + "epoch": 0.9104563835932987, + "grad_norm": 6.223207380536908, + "learning_rate": 6.264360390813678e-08, + "loss": 0.5044, + "step": 22064 + }, + { + "epoch": 0.9104976479326566, + "grad_norm": 2.2855947528795597, + "learning_rate": 6.258628596838544e-08, + "loss": 0.4731, + "step": 22065 + }, + { + "epoch": 0.9105389122720146, + "grad_norm": 6.131982032175706, + "learning_rate": 6.252899370443927e-08, + "loss": 0.4848, + "step": 22066 + }, + { + "epoch": 0.9105801766113725, + "grad_norm": 2.710986009526193, + "learning_rate": 6.247172711732091e-08, + "loss": 0.5027, + "step": 22067 + }, + { + "epoch": 0.9106214409507304, + "grad_norm": 6.159587161995442, + "learning_rate": 6.241448620805373e-08, + "loss": 0.4814, + "step": 22068 + }, + { + "epoch": 0.9106627052900883, + "grad_norm": 12.848761164229847, + "learning_rate": 6.235727097766009e-08, + "loss": 0.5726, + "step": 22069 + }, + { + "epoch": 0.9107039696294462, + "grad_norm": 5.301191844280023, + "learning_rate": 6.230008142716231e-08, + "loss": 0.5378, + "step": 22070 + }, + { + "epoch": 0.9107452339688041, + "grad_norm": 2.7805210365693784, + "learning_rate": 6.224291755758128e-08, + "loss": 0.4349, + "step": 22071 + }, + { + "epoch": 0.910786498308162, + "grad_norm": 9.928156539454555, + "learning_rate": 6.21857793699388e-08, + "loss": 0.5255, + "step": 22072 + }, + { + "epoch": 0.91082776264752, + "grad_norm": 2.716811675586285, + "learning_rate": 6.212866686525492e-08, + "loss": 0.5303, + "step": 22073 + }, + { + "epoch": 0.910869026986878, + "grad_norm": 3.8350531879206637, + "learning_rate": 6.207158004454999e-08, + "loss": 0.4888, + "step": 22074 + }, + { + "epoch": 0.9109102913262359, + "grad_norm": 4.902641687926633, + "learning_rate": 6.201451890884401e-08, + "loss": 0.4816, + "step": 22075 + }, + { + "epoch": 0.9109515556655938, + "grad_norm": 3.591632258823395, + "learning_rate": 6.195748345915586e-08, + "loss": 0.4883, + "step": 22076 + }, + { + "epoch": 0.9109928200049517, + "grad_norm": 3.38320656039629, + "learning_rate": 6.190047369650454e-08, + "loss": 0.5425, + "step": 22077 + }, + { + "epoch": 0.9110340843443097, + "grad_norm": 2.3643437347299407, + "learning_rate": 6.184348962190822e-08, + "loss": 0.537, + "step": 22078 + }, + { + "epoch": 
0.9110753486836676, + "grad_norm": 2.4625989977351517, + "learning_rate": 6.178653123638511e-08, + "loss": 0.4639, + "step": 22079 + }, + { + "epoch": 0.9111166130230255, + "grad_norm": 2.7522593708183707, + "learning_rate": 6.172959854095223e-08, + "loss": 0.5243, + "step": 22080 + }, + { + "epoch": 0.9111578773623834, + "grad_norm": 2.9452723553989513, + "learning_rate": 6.167269153662674e-08, + "loss": 0.5505, + "step": 22081 + }, + { + "epoch": 0.9111991417017413, + "grad_norm": 2.8681651021915244, + "learning_rate": 6.161581022442503e-08, + "loss": 0.5162, + "step": 22082 + }, + { + "epoch": 0.9112404060410992, + "grad_norm": 9.864445672229115, + "learning_rate": 6.155895460536359e-08, + "loss": 0.5135, + "step": 22083 + }, + { + "epoch": 0.9112816703804572, + "grad_norm": 9.188301629069114, + "learning_rate": 6.150212468045729e-08, + "loss": 0.5142, + "step": 22084 + }, + { + "epoch": 0.9113229347198152, + "grad_norm": 2.733276242793425, + "learning_rate": 6.144532045072198e-08, + "loss": 0.5705, + "step": 22085 + }, + { + "epoch": 0.9113641990591731, + "grad_norm": 2.631271700399298, + "learning_rate": 6.138854191717169e-08, + "loss": 0.5077, + "step": 22086 + }, + { + "epoch": 0.911405463398531, + "grad_norm": 2.951589079152119, + "learning_rate": 6.13317890808211e-08, + "loss": 0.4866, + "step": 22087 + }, + { + "epoch": 0.9114467277378889, + "grad_norm": 4.443091649052561, + "learning_rate": 6.127506194268357e-08, + "loss": 0.5343, + "step": 22088 + }, + { + "epoch": 0.9114879920772468, + "grad_norm": 2.84646139246708, + "learning_rate": 6.121836050377244e-08, + "loss": 0.5435, + "step": 22089 + }, + { + "epoch": 0.9115292564166048, + "grad_norm": 5.139294590572367, + "learning_rate": 6.116168476510075e-08, + "loss": 0.4666, + "step": 22090 + }, + { + "epoch": 0.9115705207559627, + "grad_norm": 2.179717182965532, + "learning_rate": 6.110503472768104e-08, + "loss": 0.4807, + "step": 22091 + }, + { + "epoch": 0.9116117850953206, + "grad_norm": 2.749633438727025, + "learning_rate": 6.104841039252479e-08, + "loss": 0.5917, + "step": 22092 + }, + { + "epoch": 0.9116530494346785, + "grad_norm": 3.3929942337360766, + "learning_rate": 6.099181176064355e-08, + "loss": 0.4573, + "step": 22093 + }, + { + "epoch": 0.9116943137740364, + "grad_norm": 2.2763908894637774, + "learning_rate": 6.093523883304853e-08, + "loss": 0.4842, + "step": 22094 + }, + { + "epoch": 0.9117355781133945, + "grad_norm": 2.768247387732774, + "learning_rate": 6.087869161075021e-08, + "loss": 0.538, + "step": 22095 + }, + { + "epoch": 0.9117768424527524, + "grad_norm": 2.3262291904394217, + "learning_rate": 6.082217009475833e-08, + "loss": 0.5011, + "step": 22096 + }, + { + "epoch": 0.9118181067921103, + "grad_norm": 4.924999722181395, + "learning_rate": 6.076567428608287e-08, + "loss": 0.5554, + "step": 22097 + }, + { + "epoch": 0.9118593711314682, + "grad_norm": 4.560326869466482, + "learning_rate": 6.070920418573306e-08, + "loss": 0.4896, + "step": 22098 + }, + { + "epoch": 0.9119006354708261, + "grad_norm": 2.9697127951036624, + "learning_rate": 6.065275979471723e-08, + "loss": 0.5457, + "step": 22099 + }, + { + "epoch": 0.911941899810184, + "grad_norm": 3.8926853948298565, + "learning_rate": 6.059634111404394e-08, + "loss": 0.4706, + "step": 22100 + }, + { + "epoch": 0.911983164149542, + "grad_norm": 4.505174856592548, + "learning_rate": 6.05399481447207e-08, + "loss": 0.4999, + "step": 22101 + }, + { + "epoch": 0.9120244284888999, + "grad_norm": 3.9564145026948716, + "learning_rate": 6.048358088775502e-08, + 
"loss": 0.4847, + "step": 22102 + }, + { + "epoch": 0.9120656928282578, + "grad_norm": 10.66820641816542, + "learning_rate": 6.042723934415378e-08, + "loss": 0.5565, + "step": 22103 + }, + { + "epoch": 0.9121069571676157, + "grad_norm": 2.413001415354464, + "learning_rate": 6.037092351492318e-08, + "loss": 0.4931, + "step": 22104 + }, + { + "epoch": 0.9121482215069737, + "grad_norm": 2.264985636958611, + "learning_rate": 6.031463340106924e-08, + "loss": 0.4726, + "step": 22105 + }, + { + "epoch": 0.9121894858463316, + "grad_norm": 2.034090631204312, + "learning_rate": 6.025836900359749e-08, + "loss": 0.4749, + "step": 22106 + }, + { + "epoch": 0.9122307501856896, + "grad_norm": 3.1362585093499167, + "learning_rate": 6.02021303235133e-08, + "loss": 0.5232, + "step": 22107 + }, + { + "epoch": 0.9122720145250475, + "grad_norm": 3.7970413091012416, + "learning_rate": 6.014591736182052e-08, + "loss": 0.5119, + "step": 22108 + }, + { + "epoch": 0.9123132788644054, + "grad_norm": 2.298556049102896, + "learning_rate": 6.008973011952367e-08, + "loss": 0.4549, + "step": 22109 + }, + { + "epoch": 0.9123545432037633, + "grad_norm": 3.1269021737100404, + "learning_rate": 6.003356859762632e-08, + "loss": 0.4509, + "step": 22110 + }, + { + "epoch": 0.9123958075431212, + "grad_norm": 4.651706517297863, + "learning_rate": 5.997743279713197e-08, + "loss": 0.5278, + "step": 22111 + }, + { + "epoch": 0.9124370718824791, + "grad_norm": 3.3348232495998773, + "learning_rate": 5.992132271904283e-08, + "loss": 0.5402, + "step": 22112 + }, + { + "epoch": 0.912478336221837, + "grad_norm": 2.322420336463881, + "learning_rate": 5.986523836436142e-08, + "loss": 0.5025, + "step": 22113 + }, + { + "epoch": 0.912519600561195, + "grad_norm": 2.3887456680608254, + "learning_rate": 5.980917973408945e-08, + "loss": 0.5702, + "step": 22114 + }, + { + "epoch": 0.912560864900553, + "grad_norm": 1.8649011176365524, + "learning_rate": 5.975314682922828e-08, + "loss": 0.5242, + "step": 22115 + }, + { + "epoch": 0.9126021292399109, + "grad_norm": 6.539907055071901, + "learning_rate": 5.96971396507791e-08, + "loss": 0.4594, + "step": 22116 + }, + { + "epoch": 0.9126433935792688, + "grad_norm": 5.473057779282048, + "learning_rate": 5.964115819974198e-08, + "loss": 0.4992, + "step": 22117 + }, + { + "epoch": 0.9126846579186267, + "grad_norm": 2.3624231071920794, + "learning_rate": 5.958520247711691e-08, + "loss": 0.5298, + "step": 22118 + }, + { + "epoch": 0.9127259222579847, + "grad_norm": 15.910724746146414, + "learning_rate": 5.952927248390344e-08, + "loss": 0.4894, + "step": 22119 + }, + { + "epoch": 0.9127671865973426, + "grad_norm": 5.092285713721132, + "learning_rate": 5.947336822110094e-08, + "loss": 0.4841, + "step": 22120 + }, + { + "epoch": 0.9128084509367005, + "grad_norm": 3.0096387438058887, + "learning_rate": 5.941748968970745e-08, + "loss": 0.4918, + "step": 22121 + }, + { + "epoch": 0.9128497152760584, + "grad_norm": 17.089707320593515, + "learning_rate": 5.936163689072149e-08, + "loss": 0.4911, + "step": 22122 + }, + { + "epoch": 0.9128909796154163, + "grad_norm": 14.842714368314132, + "learning_rate": 5.930580982514061e-08, + "loss": 0.5273, + "step": 22123 + }, + { + "epoch": 0.9129322439547742, + "grad_norm": 2.91230932988661, + "learning_rate": 5.925000849396234e-08, + "loss": 0.5224, + "step": 22124 + }, + { + "epoch": 0.9129735082941323, + "grad_norm": 2.4485060777935654, + "learning_rate": 5.919423289818271e-08, + "loss": 0.5459, + "step": 22125 + }, + { + "epoch": 0.9130147726334902, + "grad_norm": 
4.41551126572194, + "learning_rate": 5.91384830387986e-08, + "loss": 0.4971, + "step": 22126 + }, + { + "epoch": 0.9130560369728481, + "grad_norm": 3.958821968410853, + "learning_rate": 5.9082758916805535e-08, + "loss": 0.5595, + "step": 22127 + }, + { + "epoch": 0.913097301312206, + "grad_norm": 3.2600252033333996, + "learning_rate": 5.902706053319923e-08, + "loss": 0.5204, + "step": 22128 + }, + { + "epoch": 0.9131385656515639, + "grad_norm": 4.125064643381275, + "learning_rate": 5.8971387888974214e-08, + "loss": 0.4733, + "step": 22129 + }, + { + "epoch": 0.9131798299909218, + "grad_norm": 3.552709176598946, + "learning_rate": 5.891574098512503e-08, + "loss": 0.5005, + "step": 22130 + }, + { + "epoch": 0.9132210943302798, + "grad_norm": 7.458316180927174, + "learning_rate": 5.886011982264588e-08, + "loss": 0.4852, + "step": 22131 + }, + { + "epoch": 0.9132623586696377, + "grad_norm": 5.383143784294361, + "learning_rate": 5.8804524402530316e-08, + "loss": 0.5298, + "step": 22132 + }, + { + "epoch": 0.9133036230089956, + "grad_norm": 1.8666766342030812, + "learning_rate": 5.874895472577102e-08, + "loss": 0.5123, + "step": 22133 + }, + { + "epoch": 0.9133448873483535, + "grad_norm": 2.9857161064770414, + "learning_rate": 5.869341079336088e-08, + "loss": 0.4822, + "step": 22134 + }, + { + "epoch": 0.9133861516877115, + "grad_norm": 3.712486629525694, + "learning_rate": 5.8637892606291934e-08, + "loss": 0.4989, + "step": 22135 + }, + { + "epoch": 0.9134274160270694, + "grad_norm": 3.199200961354031, + "learning_rate": 5.8582400165556385e-08, + "loss": 0.5344, + "step": 22136 + }, + { + "epoch": 0.9134686803664274, + "grad_norm": 3.1748692110492294, + "learning_rate": 5.852693347214461e-08, + "loss": 0.5439, + "step": 22137 + }, + { + "epoch": 0.9135099447057853, + "grad_norm": 3.196457334922158, + "learning_rate": 5.8471492527048145e-08, + "loss": 0.5262, + "step": 22138 + }, + { + "epoch": 0.9135512090451432, + "grad_norm": 4.474172535091638, + "learning_rate": 5.84160773312567e-08, + "loss": 0.4715, + "step": 22139 + }, + { + "epoch": 0.9135924733845011, + "grad_norm": 2.6240164835561033, + "learning_rate": 5.836068788576049e-08, + "loss": 0.5094, + "step": 22140 + }, + { + "epoch": 0.913633737723859, + "grad_norm": 4.3769210869998165, + "learning_rate": 5.8305324191549046e-08, + "loss": 0.5071, + "step": 22141 + }, + { + "epoch": 0.913675002063217, + "grad_norm": 2.634334805298862, + "learning_rate": 5.8249986249610754e-08, + "loss": 0.4816, + "step": 22142 + }, + { + "epoch": 0.9137162664025749, + "grad_norm": 3.182808819593145, + "learning_rate": 5.8194674060934486e-08, + "loss": 0.5362, + "step": 22143 + }, + { + "epoch": 0.9137575307419328, + "grad_norm": 7.486022584856199, + "learning_rate": 5.8139387626508445e-08, + "loss": 0.5677, + "step": 22144 + }, + { + "epoch": 0.9137987950812907, + "grad_norm": 10.894995775222705, + "learning_rate": 5.808412694731952e-08, + "loss": 0.4856, + "step": 22145 + }, + { + "epoch": 0.9138400594206487, + "grad_norm": 6.373540858359003, + "learning_rate": 5.802889202435541e-08, + "loss": 0.5048, + "step": 22146 + }, + { + "epoch": 0.9138813237600066, + "grad_norm": 2.5288456920208717, + "learning_rate": 5.7973682858602496e-08, + "loss": 0.5075, + "step": 22147 + }, + { + "epoch": 0.9139225880993646, + "grad_norm": 4.125844182189881, + "learning_rate": 5.7918499451047155e-08, + "loss": 0.5384, + "step": 22148 + }, + { + "epoch": 0.9139638524387225, + "grad_norm": 3.2295897140593373, + "learning_rate": 5.786334180267477e-08, + "loss": 0.5364, + "step": 
22149 + }, + { + "epoch": 0.9140051167780804, + "grad_norm": 3.1704541420384063, + "learning_rate": 5.7808209914470886e-08, + "loss": 0.4991, + "step": 22150 + }, + { + "epoch": 0.9140463811174383, + "grad_norm": 2.5617709561869204, + "learning_rate": 5.7753103787420215e-08, + "loss": 0.4983, + "step": 22151 + }, + { + "epoch": 0.9140876454567962, + "grad_norm": 5.493768606247699, + "learning_rate": 5.7698023422506965e-08, + "loss": 0.5487, + "step": 22152 + }, + { + "epoch": 0.9141289097961541, + "grad_norm": 3.0077764783794767, + "learning_rate": 5.764296882071518e-08, + "loss": 0.4889, + "step": 22153 + }, + { + "epoch": 0.914170174135512, + "grad_norm": 3.000077457854758, + "learning_rate": 5.758793998302808e-08, + "loss": 0.5742, + "step": 22154 + }, + { + "epoch": 0.91421143847487, + "grad_norm": 2.687918276770829, + "learning_rate": 5.7532936910428714e-08, + "loss": 0.5143, + "step": 22155 + }, + { + "epoch": 0.914252702814228, + "grad_norm": 2.3763074932698256, + "learning_rate": 5.747795960389962e-08, + "loss": 0.5921, + "step": 22156 + }, + { + "epoch": 0.9142939671535859, + "grad_norm": 6.180427335641973, + "learning_rate": 5.742300806442302e-08, + "loss": 0.504, + "step": 22157 + }, + { + "epoch": 0.9143352314929438, + "grad_norm": 2.677270492243713, + "learning_rate": 5.736808229298013e-08, + "loss": 0.4829, + "step": 22158 + }, + { + "epoch": 0.9143764958323017, + "grad_norm": 3.2765589242749154, + "learning_rate": 5.731318229055216e-08, + "loss": 0.554, + "step": 22159 + }, + { + "epoch": 0.9144177601716597, + "grad_norm": 2.5591816263231064, + "learning_rate": 5.7258308058119657e-08, + "loss": 0.4939, + "step": 22160 + }, + { + "epoch": 0.9144590245110176, + "grad_norm": 10.554845251087258, + "learning_rate": 5.7203459596663346e-08, + "loss": 0.5193, + "step": 22161 + }, + { + "epoch": 0.9145002888503755, + "grad_norm": 15.369660140071286, + "learning_rate": 5.714863690716227e-08, + "loss": 0.4814, + "step": 22162 + }, + { + "epoch": 0.9145415531897334, + "grad_norm": 3.2884711660073442, + "learning_rate": 5.709383999059597e-08, + "loss": 0.5271, + "step": 22163 + }, + { + "epoch": 0.9145828175290913, + "grad_norm": 3.360451940772533, + "learning_rate": 5.703906884794369e-08, + "loss": 0.4984, + "step": 22164 + }, + { + "epoch": 0.9146240818684492, + "grad_norm": 4.195666568910807, + "learning_rate": 5.698432348018312e-08, + "loss": 0.545, + "step": 22165 + }, + { + "epoch": 0.9146653462078073, + "grad_norm": 18.153193427873347, + "learning_rate": 5.692960388829232e-08, + "loss": 0.5166, + "step": 22166 + }, + { + "epoch": 0.9147066105471652, + "grad_norm": 14.501661136185794, + "learning_rate": 5.687491007324885e-08, + "loss": 0.4917, + "step": 22167 + }, + { + "epoch": 0.9147478748865231, + "grad_norm": 2.0938700153979335, + "learning_rate": 5.682024203602959e-08, + "loss": 0.4419, + "step": 22168 + }, + { + "epoch": 0.914789139225881, + "grad_norm": 11.970082450709107, + "learning_rate": 5.676559977761125e-08, + "loss": 0.4955, + "step": 22169 + }, + { + "epoch": 0.9148304035652389, + "grad_norm": 7.481934174572043, + "learning_rate": 5.6710983298969565e-08, + "loss": 0.5214, + "step": 22170 + }, + { + "epoch": 0.9148716679045968, + "grad_norm": 2.4643902700709717, + "learning_rate": 5.665639260108024e-08, + "loss": 0.5255, + "step": 22171 + }, + { + "epoch": 0.9149129322439548, + "grad_norm": 2.060016578918938, + "learning_rate": 5.6601827684918505e-08, + "loss": 0.5004, + "step": 22172 + }, + { + "epoch": 0.9149541965833127, + "grad_norm": 19.21224823137636, + 
"learning_rate": 5.6547288551459076e-08, + "loss": 0.5732, + "step": 22173 + }, + { + "epoch": 0.9149954609226706, + "grad_norm": 9.933671387383136, + "learning_rate": 5.649277520167584e-08, + "loss": 0.509, + "step": 22174 + }, + { + "epoch": 0.9150367252620285, + "grad_norm": 3.8135263531289287, + "learning_rate": 5.643828763654285e-08, + "loss": 0.4642, + "step": 22175 + }, + { + "epoch": 0.9150779896013865, + "grad_norm": 3.6523011561654584, + "learning_rate": 5.638382585703317e-08, + "loss": 0.5307, + "step": 22176 + }, + { + "epoch": 0.9151192539407444, + "grad_norm": 3.386988311335152, + "learning_rate": 5.632938986411984e-08, + "loss": 0.4947, + "step": 22177 + }, + { + "epoch": 0.9151605182801024, + "grad_norm": 1.9915488018342695, + "learning_rate": 5.627497965877526e-08, + "loss": 0.4902, + "step": 22178 + }, + { + "epoch": 0.9152017826194603, + "grad_norm": 3.959617009857702, + "learning_rate": 5.6220595241970986e-08, + "loss": 0.514, + "step": 22179 + }, + { + "epoch": 0.9152430469588182, + "grad_norm": 4.0064278912649, + "learning_rate": 5.616623661467873e-08, + "loss": 0.5199, + "step": 22180 + }, + { + "epoch": 0.9152843112981761, + "grad_norm": 2.8619512432584875, + "learning_rate": 5.61119037778694e-08, + "loss": 0.4886, + "step": 22181 + }, + { + "epoch": 0.915325575637534, + "grad_norm": 2.703381258816044, + "learning_rate": 5.605759673251354e-08, + "loss": 0.5241, + "step": 22182 + }, + { + "epoch": 0.9153668399768919, + "grad_norm": 2.636536265179102, + "learning_rate": 5.600331547958104e-08, + "loss": 0.4877, + "step": 22183 + }, + { + "epoch": 0.9154081043162499, + "grad_norm": 3.034422505138954, + "learning_rate": 5.59490600200418e-08, + "loss": 0.5067, + "step": 22184 + }, + { + "epoch": 0.9154493686556078, + "grad_norm": 3.1763709632406494, + "learning_rate": 5.589483035486487e-08, + "loss": 0.5046, + "step": 22185 + }, + { + "epoch": 0.9154906329949658, + "grad_norm": 2.7586226239895844, + "learning_rate": 5.584062648501881e-08, + "loss": 0.5031, + "step": 22186 + }, + { + "epoch": 0.9155318973343237, + "grad_norm": 2.1157973822666927, + "learning_rate": 5.578644841147168e-08, + "loss": 0.4676, + "step": 22187 + }, + { + "epoch": 0.9155731616736816, + "grad_norm": 2.6278597099120824, + "learning_rate": 5.573229613519171e-08, + "loss": 0.5093, + "step": 22188 + }, + { + "epoch": 0.9156144260130396, + "grad_norm": 3.7812750589988657, + "learning_rate": 5.567816965714595e-08, + "loss": 0.5259, + "step": 22189 + }, + { + "epoch": 0.9156556903523975, + "grad_norm": 2.99448790338996, + "learning_rate": 5.56240689783013e-08, + "loss": 0.5356, + "step": 22190 + }, + { + "epoch": 0.9156969546917554, + "grad_norm": 4.359901472910686, + "learning_rate": 5.5569994099623825e-08, + "loss": 0.5022, + "step": 22191 + }, + { + "epoch": 0.9157382190311133, + "grad_norm": 4.641622449747543, + "learning_rate": 5.551594502207957e-08, + "loss": 0.4953, + "step": 22192 + }, + { + "epoch": 0.9157794833704712, + "grad_norm": 4.235266018392532, + "learning_rate": 5.546192174663428e-08, + "loss": 0.5037, + "step": 22193 + }, + { + "epoch": 0.9158207477098291, + "grad_norm": 3.467976383610358, + "learning_rate": 5.540792427425284e-08, + "loss": 0.5325, + "step": 22194 + }, + { + "epoch": 0.915862012049187, + "grad_norm": 10.374404438385218, + "learning_rate": 5.535395260589948e-08, + "loss": 0.5492, + "step": 22195 + }, + { + "epoch": 0.9159032763885451, + "grad_norm": 2.569789409041672, + "learning_rate": 5.530000674253843e-08, + "loss": 0.4974, + "step": 22196 + }, + { + "epoch": 
0.915944540727903, + "grad_norm": 3.6056284412069584, + "learning_rate": 5.5246086685133424e-08, + "loss": 0.4845, + "step": 22197 + }, + { + "epoch": 0.9159858050672609, + "grad_norm": 8.467331635208764, + "learning_rate": 5.519219243464768e-08, + "loss": 0.543, + "step": 22198 + }, + { + "epoch": 0.9160270694066188, + "grad_norm": 4.348486340943108, + "learning_rate": 5.5138323992043606e-08, + "loss": 0.5122, + "step": 22199 + }, + { + "epoch": 0.9160683337459767, + "grad_norm": 2.7933585026169836, + "learning_rate": 5.508448135828359e-08, + "loss": 0.4756, + "step": 22200 + }, + { + "epoch": 0.9161095980853347, + "grad_norm": 3.4963310372542926, + "learning_rate": 5.50306645343292e-08, + "loss": 0.4781, + "step": 22201 + }, + { + "epoch": 0.9161508624246926, + "grad_norm": 3.4359302744503957, + "learning_rate": 5.497687352114217e-08, + "loss": 0.4702, + "step": 22202 + }, + { + "epoch": 0.9161921267640505, + "grad_norm": 2.864992278287761, + "learning_rate": 5.4923108319683055e-08, + "loss": 0.4911, + "step": 22203 + }, + { + "epoch": 0.9162333911034084, + "grad_norm": 2.8176387898990294, + "learning_rate": 5.486936893091227e-08, + "loss": 0.52, + "step": 22204 + }, + { + "epoch": 0.9162746554427663, + "grad_norm": 2.5743767692826416, + "learning_rate": 5.481565535578953e-08, + "loss": 0.5137, + "step": 22205 + }, + { + "epoch": 0.9163159197821242, + "grad_norm": 2.705384136785887, + "learning_rate": 5.4761967595274576e-08, + "loss": 0.5584, + "step": 22206 + }, + { + "epoch": 0.9163571841214823, + "grad_norm": 3.026684911634661, + "learning_rate": 5.4708305650326304e-08, + "loss": 0.5255, + "step": 22207 + }, + { + "epoch": 0.9163984484608402, + "grad_norm": 8.6501677679996, + "learning_rate": 5.465466952190312e-08, + "loss": 0.512, + "step": 22208 + }, + { + "epoch": 0.9164397128001981, + "grad_norm": 3.943369142742242, + "learning_rate": 5.4601059210963425e-08, + "loss": 0.472, + "step": 22209 + }, + { + "epoch": 0.916480977139556, + "grad_norm": 3.1321977307219138, + "learning_rate": 5.4547474718464606e-08, + "loss": 0.5139, + "step": 22210 + }, + { + "epoch": 0.9165222414789139, + "grad_norm": 4.894636416159297, + "learning_rate": 5.449391604536391e-08, + "loss": 0.5047, + "step": 22211 + }, + { + "epoch": 0.9165635058182718, + "grad_norm": 4.018622206697265, + "learning_rate": 5.444038319261774e-08, + "loss": 0.5098, + "step": 22212 + }, + { + "epoch": 0.9166047701576298, + "grad_norm": 2.9699361730992293, + "learning_rate": 5.438687616118265e-08, + "loss": 0.5153, + "step": 22213 + }, + { + "epoch": 0.9166460344969877, + "grad_norm": 2.789714872180494, + "learning_rate": 5.433339495201456e-08, + "loss": 0.4846, + "step": 22214 + }, + { + "epoch": 0.9166872988363456, + "grad_norm": 4.878420604660975, + "learning_rate": 5.427993956606836e-08, + "loss": 0.4769, + "step": 22215 + }, + { + "epoch": 0.9167285631757035, + "grad_norm": 12.905379722029897, + "learning_rate": 5.422651000429896e-08, + "loss": 0.5641, + "step": 22216 + }, + { + "epoch": 0.9167698275150615, + "grad_norm": 2.7470437400145125, + "learning_rate": 5.417310626766125e-08, + "loss": 0.4679, + "step": 22217 + }, + { + "epoch": 0.9168110918544194, + "grad_norm": 2.9413504160059594, + "learning_rate": 5.411972835710832e-08, + "loss": 0.5715, + "step": 22218 + }, + { + "epoch": 0.9168523561937774, + "grad_norm": 5.147345612008602, + "learning_rate": 5.4066376273594564e-08, + "loss": 0.5396, + "step": 22219 + }, + { + "epoch": 0.9168936205331353, + "grad_norm": 11.131931992876968, + "learning_rate": 
5.4013050018072055e-08, + "loss": 0.4651, + "step": 22220 + }, + { + "epoch": 0.9169348848724932, + "grad_norm": 2.8899897344526995, + "learning_rate": 5.395974959149386e-08, + "loss": 0.5606, + "step": 22221 + }, + { + "epoch": 0.9169761492118511, + "grad_norm": 7.698436231022689, + "learning_rate": 5.3906474994812217e-08, + "loss": 0.4952, + "step": 22222 + }, + { + "epoch": 0.917017413551209, + "grad_norm": 4.797862927593946, + "learning_rate": 5.3853226228978203e-08, + "loss": 0.5655, + "step": 22223 + }, + { + "epoch": 0.9170586778905669, + "grad_norm": 3.5426754260860385, + "learning_rate": 5.380000329494339e-08, + "loss": 0.5497, + "step": 22224 + }, + { + "epoch": 0.9170999422299249, + "grad_norm": 7.028864294251748, + "learning_rate": 5.374680619365835e-08, + "loss": 0.5029, + "step": 22225 + }, + { + "epoch": 0.9171412065692828, + "grad_norm": 3.434502890201431, + "learning_rate": 5.369363492607349e-08, + "loss": 0.5014, + "step": 22226 + }, + { + "epoch": 0.9171824709086408, + "grad_norm": 8.478780710127033, + "learning_rate": 5.364048949313821e-08, + "loss": 0.5046, + "step": 22227 + }, + { + "epoch": 0.9172237352479987, + "grad_norm": 2.5187980181525904, + "learning_rate": 5.35873698958021e-08, + "loss": 0.4842, + "step": 22228 + }, + { + "epoch": 0.9172649995873566, + "grad_norm": 2.9885459594800476, + "learning_rate": 5.353427613501388e-08, + "loss": 0.5175, + "step": 22229 + }, + { + "epoch": 0.9173062639267145, + "grad_norm": 2.2652215813429533, + "learning_rate": 5.3481208211722146e-08, + "loss": 0.4919, + "step": 22230 + }, + { + "epoch": 0.9173475282660725, + "grad_norm": 4.28645257025743, + "learning_rate": 5.3428166126874624e-08, + "loss": 0.4567, + "step": 22231 + }, + { + "epoch": 0.9173887926054304, + "grad_norm": 3.2887825926267884, + "learning_rate": 5.337514988141873e-08, + "loss": 0.5507, + "step": 22232 + }, + { + "epoch": 0.9174300569447883, + "grad_norm": 2.7919404058824866, + "learning_rate": 5.3322159476301536e-08, + "loss": 0.4856, + "step": 22233 + }, + { + "epoch": 0.9174713212841462, + "grad_norm": 10.083289890348013, + "learning_rate": 5.326919491246962e-08, + "loss": 0.51, + "step": 22234 + }, + { + "epoch": 0.9175125856235041, + "grad_norm": 2.742309494793723, + "learning_rate": 5.321625619086906e-08, + "loss": 0.495, + "step": 22235 + }, + { + "epoch": 0.917553849962862, + "grad_norm": 2.477496552670229, + "learning_rate": 5.316334331244543e-08, + "loss": 0.4893, + "step": 22236 + }, + { + "epoch": 0.9175951143022201, + "grad_norm": 4.279003516880022, + "learning_rate": 5.3110456278143984e-08, + "loss": 0.5832, + "step": 22237 + }, + { + "epoch": 0.917636378641578, + "grad_norm": 2.7129573881097997, + "learning_rate": 5.305759508890928e-08, + "loss": 0.4901, + "step": 22238 + }, + { + "epoch": 0.9176776429809359, + "grad_norm": 3.309460607723671, + "learning_rate": 5.3004759745685915e-08, + "loss": 0.4803, + "step": 22239 + }, + { + "epoch": 0.9177189073202938, + "grad_norm": 2.744228275987953, + "learning_rate": 5.295195024941696e-08, + "loss": 0.495, + "step": 22240 + }, + { + "epoch": 0.9177601716596517, + "grad_norm": 6.080973803766631, + "learning_rate": 5.2899166601046323e-08, + "loss": 0.4997, + "step": 22241 + }, + { + "epoch": 0.9178014359990097, + "grad_norm": 4.469227181644915, + "learning_rate": 5.284640880151675e-08, + "loss": 0.4777, + "step": 22242 + }, + { + "epoch": 0.9178427003383676, + "grad_norm": 3.2960664458691205, + "learning_rate": 5.27936768517705e-08, + "loss": 0.5571, + "step": 22243 + }, + { + "epoch": 
0.9178839646777255, + "grad_norm": 2.4642191018858126, + "learning_rate": 5.274097075274947e-08, + "loss": 0.4931, + "step": 22244 + }, + { + "epoch": 0.9179252290170834, + "grad_norm": 3.60056495226913, + "learning_rate": 5.268829050539525e-08, + "loss": 0.5179, + "step": 22245 + }, + { + "epoch": 0.9179664933564413, + "grad_norm": 3.8206608206880506, + "learning_rate": 5.263563611064859e-08, + "loss": 0.4885, + "step": 22246 + }, + { + "epoch": 0.9180077576957993, + "grad_norm": 3.602446564467673, + "learning_rate": 5.2583007569450557e-08, + "loss": 0.4866, + "step": 22247 + }, + { + "epoch": 0.9180490220351573, + "grad_norm": 3.1688402202012296, + "learning_rate": 5.253040488274058e-08, + "loss": 0.4823, + "step": 22248 + }, + { + "epoch": 0.9180902863745152, + "grad_norm": 4.399153595166108, + "learning_rate": 5.2477828051458734e-08, + "loss": 0.5121, + "step": 22249 + }, + { + "epoch": 0.9181315507138731, + "grad_norm": 2.8704771482783586, + "learning_rate": 5.242527707654393e-08, + "loss": 0.5164, + "step": 22250 + }, + { + "epoch": 0.918172815053231, + "grad_norm": 2.5004297097666166, + "learning_rate": 5.23727519589351e-08, + "loss": 0.4863, + "step": 22251 + }, + { + "epoch": 0.9182140793925889, + "grad_norm": 5.939686311386834, + "learning_rate": 5.232025269957014e-08, + "loss": 0.5315, + "step": 22252 + }, + { + "epoch": 0.9182553437319468, + "grad_norm": 19.89110773982376, + "learning_rate": 5.2267779299387145e-08, + "loss": 0.4521, + "step": 22253 + }, + { + "epoch": 0.9182966080713048, + "grad_norm": 2.177869391108097, + "learning_rate": 5.221533175932319e-08, + "loss": 0.5559, + "step": 22254 + }, + { + "epoch": 0.9183378724106627, + "grad_norm": 1.797859578781097, + "learning_rate": 5.216291008031537e-08, + "loss": 0.4586, + "step": 22255 + }, + { + "epoch": 0.9183791367500206, + "grad_norm": 2.184594047265549, + "learning_rate": 5.2110514263299933e-08, + "loss": 0.5565, + "step": 22256 + }, + { + "epoch": 0.9184204010893786, + "grad_norm": 2.7759560969583523, + "learning_rate": 5.205814430921263e-08, + "loss": 0.473, + "step": 22257 + }, + { + "epoch": 0.9184616654287365, + "grad_norm": 4.837168335837051, + "learning_rate": 5.2005800218989044e-08, + "loss": 0.5429, + "step": 22258 + }, + { + "epoch": 0.9185029297680944, + "grad_norm": 4.672113312913826, + "learning_rate": 5.1953481993564254e-08, + "loss": 0.4767, + "step": 22259 + }, + { + "epoch": 0.9185441941074524, + "grad_norm": 3.0150770257602253, + "learning_rate": 5.190118963387269e-08, + "loss": 0.4951, + "step": 22260 + }, + { + "epoch": 0.9185854584468103, + "grad_norm": 10.232776120449698, + "learning_rate": 5.184892314084844e-08, + "loss": 0.5196, + "step": 22261 + }, + { + "epoch": 0.9186267227861682, + "grad_norm": 3.9620177153590403, + "learning_rate": 5.179668251542524e-08, + "loss": 0.4549, + "step": 22262 + }, + { + "epoch": 0.9186679871255261, + "grad_norm": 3.714332820406475, + "learning_rate": 5.17444677585362e-08, + "loss": 0.5023, + "step": 22263 + }, + { + "epoch": 0.918709251464884, + "grad_norm": 3.973768621665909, + "learning_rate": 5.1692278871113717e-08, + "loss": 0.5099, + "step": 22264 + }, + { + "epoch": 0.9187505158042419, + "grad_norm": 2.2500769874987756, + "learning_rate": 5.16401158540904e-08, + "loss": 0.4616, + "step": 22265 + }, + { + "epoch": 0.9187917801435999, + "grad_norm": 2.421718736164999, + "learning_rate": 5.1587978708397653e-08, + "loss": 0.4903, + "step": 22266 + }, + { + "epoch": 0.9188330444829578, + "grad_norm": 2.0933089993110436, + "learning_rate": 
5.153586743496741e-08, + "loss": 0.4791, + "step": 22267 + }, + { + "epoch": 0.9188743088223158, + "grad_norm": 3.3214419762067937, + "learning_rate": 5.1483782034729757e-08, + "loss": 0.5276, + "step": 22268 + }, + { + "epoch": 0.9189155731616737, + "grad_norm": 2.380991956269738, + "learning_rate": 5.143172250861544e-08, + "loss": 0.4973, + "step": 22269 + }, + { + "epoch": 0.9189568375010316, + "grad_norm": 2.5389799601962073, + "learning_rate": 5.1379688857554394e-08, + "loss": 0.502, + "step": 22270 + }, + { + "epoch": 0.9189981018403895, + "grad_norm": 3.0074543566575676, + "learning_rate": 5.132768108247587e-08, + "loss": 0.5388, + "step": 22271 + }, + { + "epoch": 0.9190393661797475, + "grad_norm": 3.0808530323710266, + "learning_rate": 5.127569918430913e-08, + "loss": 0.5295, + "step": 22272 + }, + { + "epoch": 0.9190806305191054, + "grad_norm": 6.798716061066478, + "learning_rate": 5.1223743163982425e-08, + "loss": 0.4764, + "step": 22273 + }, + { + "epoch": 0.9191218948584633, + "grad_norm": 2.7435875900353865, + "learning_rate": 5.117181302242402e-08, + "loss": 0.4911, + "step": 22274 + }, + { + "epoch": 0.9191631591978212, + "grad_norm": 3.5974917326651896, + "learning_rate": 5.1119908760561327e-08, + "loss": 0.4567, + "step": 22275 + }, + { + "epoch": 0.9192044235371791, + "grad_norm": 5.035895783996395, + "learning_rate": 5.106803037932178e-08, + "loss": 0.4941, + "step": 22276 + }, + { + "epoch": 0.919245687876537, + "grad_norm": 2.9268322812501237, + "learning_rate": 5.101617787963181e-08, + "loss": 0.4711, + "step": 22277 + }, + { + "epoch": 0.9192869522158951, + "grad_norm": 5.291356790012011, + "learning_rate": 5.0964351262417664e-08, + "loss": 0.589, + "step": 22278 + }, + { + "epoch": 0.919328216555253, + "grad_norm": 4.291133870164723, + "learning_rate": 5.091255052860511e-08, + "loss": 0.536, + "step": 22279 + }, + { + "epoch": 0.9193694808946109, + "grad_norm": 2.6435429593026405, + "learning_rate": 5.086077567911973e-08, + "loss": 0.5119, + "step": 22280 + }, + { + "epoch": 0.9194107452339688, + "grad_norm": 5.681960161869492, + "learning_rate": 5.08090267148858e-08, + "loss": 0.4989, + "step": 22281 + }, + { + "epoch": 0.9194520095733267, + "grad_norm": 2.2468556357792067, + "learning_rate": 5.075730363682823e-08, + "loss": 0.4945, + "step": 22282 + }, + { + "epoch": 0.9194932739126846, + "grad_norm": 2.8213378702515626, + "learning_rate": 5.07056064458703e-08, + "loss": 0.5369, + "step": 22283 + }, + { + "epoch": 0.9195345382520426, + "grad_norm": 4.066932779928949, + "learning_rate": 5.065393514293626e-08, + "loss": 0.5099, + "step": 22284 + }, + { + "epoch": 0.9195758025914005, + "grad_norm": 3.1013317149333184, + "learning_rate": 5.060228972894837e-08, + "loss": 0.5366, + "step": 22285 + }, + { + "epoch": 0.9196170669307584, + "grad_norm": 5.145869716855649, + "learning_rate": 5.0550670204829396e-08, + "loss": 0.5152, + "step": 22286 + }, + { + "epoch": 0.9196583312701163, + "grad_norm": 2.305642298500847, + "learning_rate": 5.049907657150143e-08, + "loss": 0.4969, + "step": 22287 + }, + { + "epoch": 0.9196995956094743, + "grad_norm": 2.160398232952279, + "learning_rate": 5.0447508829886246e-08, + "loss": 0.494, + "step": 22288 + }, + { + "epoch": 0.9197408599488323, + "grad_norm": 11.603409669305988, + "learning_rate": 5.03959669809046e-08, + "loss": 0.5037, + "step": 22289 + }, + { + "epoch": 0.9197821242881902, + "grad_norm": 2.356914402757732, + "learning_rate": 5.034445102547725e-08, + "loss": 0.5426, + "step": 22290 + }, + { + "epoch": 
0.9198233886275481, + "grad_norm": 4.600958634994387, + "learning_rate": 5.029296096452463e-08, + "loss": 0.4809, + "step": 22291 + }, + { + "epoch": 0.919864652966906, + "grad_norm": 4.729849723243728, + "learning_rate": 5.0241496798966334e-08, + "loss": 0.4784, + "step": 22292 + }, + { + "epoch": 0.9199059173062639, + "grad_norm": 2.745939859197945, + "learning_rate": 5.019005852972164e-08, + "loss": 0.5198, + "step": 22293 + }, + { + "epoch": 0.9199471816456218, + "grad_norm": 2.8547199215003833, + "learning_rate": 5.01386461577093e-08, + "loss": 0.4917, + "step": 22294 + }, + { + "epoch": 0.9199884459849798, + "grad_norm": 6.767758617600285, + "learning_rate": 5.008725968384792e-08, + "loss": 0.5237, + "step": 22295 + }, + { + "epoch": 0.9200297103243377, + "grad_norm": 3.3552105095167857, + "learning_rate": 5.003589910905509e-08, + "loss": 0.5898, + "step": 22296 + }, + { + "epoch": 0.9200709746636956, + "grad_norm": 86.36354589661148, + "learning_rate": 4.998456443424843e-08, + "loss": 0.5655, + "step": 22297 + }, + { + "epoch": 0.9201122390030536, + "grad_norm": 4.594370000426614, + "learning_rate": 4.993325566034468e-08, + "loss": 0.5575, + "step": 22298 + }, + { + "epoch": 0.9201535033424115, + "grad_norm": 3.396686769685049, + "learning_rate": 4.988197278826062e-08, + "loss": 0.5244, + "step": 22299 + }, + { + "epoch": 0.9201947676817694, + "grad_norm": 6.219935100979587, + "learning_rate": 4.9830715818912184e-08, + "loss": 0.4735, + "step": 22300 + }, + { + "epoch": 0.9202360320211274, + "grad_norm": 2.327428530375595, + "learning_rate": 4.977948475321481e-08, + "loss": 0.5396, + "step": 22301 + }, + { + "epoch": 0.9202772963604853, + "grad_norm": 2.784557437531218, + "learning_rate": 4.972827959208376e-08, + "loss": 0.5053, + "step": 22302 + }, + { + "epoch": 0.9203185606998432, + "grad_norm": 6.493491085835276, + "learning_rate": 4.967710033643363e-08, + "loss": 0.5278, + "step": 22303 + }, + { + "epoch": 0.9203598250392011, + "grad_norm": 4.5374592807146445, + "learning_rate": 4.962594698717887e-08, + "loss": 0.4803, + "step": 22304 + }, + { + "epoch": 0.920401089378559, + "grad_norm": 5.52724910568194, + "learning_rate": 4.957481954523274e-08, + "loss": 0.503, + "step": 22305 + }, + { + "epoch": 0.9204423537179169, + "grad_norm": 5.355604292456528, + "learning_rate": 4.952371801150868e-08, + "loss": 0.5009, + "step": 22306 + }, + { + "epoch": 0.9204836180572749, + "grad_norm": 2.410086610413743, + "learning_rate": 4.947264238691962e-08, + "loss": 0.4874, + "step": 22307 + }, + { + "epoch": 0.9205248823966329, + "grad_norm": 2.1418646730422677, + "learning_rate": 4.9421592672378e-08, + "loss": 0.5172, + "step": 22308 + }, + { + "epoch": 0.9205661467359908, + "grad_norm": 3.699677211978941, + "learning_rate": 4.937056886879559e-08, + "loss": 0.4998, + "step": 22309 + }, + { + "epoch": 0.9206074110753487, + "grad_norm": 3.375886384068109, + "learning_rate": 4.9319570977083504e-08, + "loss": 0.4864, + "step": 22310 + }, + { + "epoch": 0.9206486754147066, + "grad_norm": 2.568617102988599, + "learning_rate": 4.9268598998153e-08, + "loss": 0.4367, + "step": 22311 + }, + { + "epoch": 0.9206899397540645, + "grad_norm": 4.097057039000812, + "learning_rate": 4.921765293291436e-08, + "loss": 0.4321, + "step": 22312 + }, + { + "epoch": 0.9207312040934225, + "grad_norm": 2.637590654835761, + "learning_rate": 4.916673278227801e-08, + "loss": 0.5201, + "step": 22313 + }, + { + "epoch": 0.9207724684327804, + "grad_norm": 4.413745374521684, + "learning_rate": 4.911583854715307e-08, + "loss": 
0.5678, + "step": 22314 + }, + { + "epoch": 0.9208137327721383, + "grad_norm": 1.9609583991074455, + "learning_rate": 4.90649702284488e-08, + "loss": 0.4494, + "step": 22315 + }, + { + "epoch": 0.9208549971114962, + "grad_norm": 23.142206177010117, + "learning_rate": 4.9014127827073816e-08, + "loss": 0.486, + "step": 22316 + }, + { + "epoch": 0.9208962614508541, + "grad_norm": 2.7099039249414054, + "learning_rate": 4.896331134393639e-08, + "loss": 0.5255, + "step": 22317 + }, + { + "epoch": 0.9209375257902122, + "grad_norm": 118.34578612809418, + "learning_rate": 4.891252077994413e-08, + "loss": 0.4963, + "step": 22318 + }, + { + "epoch": 0.9209787901295701, + "grad_norm": 3.3210932780089415, + "learning_rate": 4.88617561360043e-08, + "loss": 0.4807, + "step": 22319 + }, + { + "epoch": 0.921020054468928, + "grad_norm": 2.3853476433009853, + "learning_rate": 4.881101741302385e-08, + "loss": 0.4779, + "step": 22320 + }, + { + "epoch": 0.9210613188082859, + "grad_norm": 2.41775475380167, + "learning_rate": 4.876030461190889e-08, + "loss": 0.4265, + "step": 22321 + }, + { + "epoch": 0.9211025831476438, + "grad_norm": 3.913351102517065, + "learning_rate": 4.870961773356536e-08, + "loss": 0.5391, + "step": 22322 + }, + { + "epoch": 0.9211438474870017, + "grad_norm": 3.954271601290105, + "learning_rate": 4.8658956778898535e-08, + "loss": 0.4913, + "step": 22323 + }, + { + "epoch": 0.9211851118263596, + "grad_norm": 10.681154011987262, + "learning_rate": 4.8608321748813357e-08, + "loss": 0.549, + "step": 22324 + }, + { + "epoch": 0.9212263761657176, + "grad_norm": 2.5862975121056375, + "learning_rate": 4.8557712644214605e-08, + "loss": 0.5123, + "step": 22325 + }, + { + "epoch": 0.9212676405050755, + "grad_norm": 2.656119262640267, + "learning_rate": 4.8507129466005887e-08, + "loss": 0.5461, + "step": 22326 + }, + { + "epoch": 0.9213089048444334, + "grad_norm": 3.6802805586685454, + "learning_rate": 4.845657221509081e-08, + "loss": 0.5004, + "step": 22327 + }, + { + "epoch": 0.9213501691837913, + "grad_norm": 2.6331643054983362, + "learning_rate": 4.840604089237266e-08, + "loss": 0.5275, + "step": 22328 + }, + { + "epoch": 0.9213914335231493, + "grad_norm": 2.008439942304052, + "learning_rate": 4.835553549875421e-08, + "loss": 0.5235, + "step": 22329 + }, + { + "epoch": 0.9214326978625073, + "grad_norm": 2.241626601791775, + "learning_rate": 4.830505603513708e-08, + "loss": 0.5235, + "step": 22330 + }, + { + "epoch": 0.9214739622018652, + "grad_norm": 2.6461424802879465, + "learning_rate": 4.8254602502423194e-08, + "loss": 0.5033, + "step": 22331 + }, + { + "epoch": 0.9215152265412231, + "grad_norm": 3.554807248163201, + "learning_rate": 4.820417490151385e-08, + "loss": 0.4812, + "step": 22332 + }, + { + "epoch": 0.921556490880581, + "grad_norm": 13.686052567147431, + "learning_rate": 4.815377323331e-08, + "loss": 0.4928, + "step": 22333 + }, + { + "epoch": 0.9215977552199389, + "grad_norm": 4.205794807246196, + "learning_rate": 4.810339749871157e-08, + "loss": 0.5089, + "step": 22334 + }, + { + "epoch": 0.9216390195592968, + "grad_norm": 1.9532704112681447, + "learning_rate": 4.805304769861868e-08, + "loss": 0.4911, + "step": 22335 + }, + { + "epoch": 0.9216802838986548, + "grad_norm": 2.9732598554845797, + "learning_rate": 4.8002723833930285e-08, + "loss": 0.4714, + "step": 22336 + }, + { + "epoch": 0.9217215482380127, + "grad_norm": 2.227877080560213, + "learning_rate": 4.795242590554583e-08, + "loss": 0.4792, + "step": 22337 + }, + { + "epoch": 0.9217628125773706, + "grad_norm": 
3.1880425431201793, + "learning_rate": 4.7902153914363424e-08, + "loss": 0.515, + "step": 22338 + }, + { + "epoch": 0.9218040769167286, + "grad_norm": 6.88004760587802, + "learning_rate": 4.7851907861281184e-08, + "loss": 0.544, + "step": 22339 + }, + { + "epoch": 0.9218453412560865, + "grad_norm": 3.2724401869982183, + "learning_rate": 4.78016877471964e-08, + "loss": 0.5304, + "step": 22340 + }, + { + "epoch": 0.9218866055954444, + "grad_norm": 4.7842033837921525, + "learning_rate": 4.7751493573006675e-08, + "loss": 0.4847, + "step": 22341 + }, + { + "epoch": 0.9219278699348024, + "grad_norm": 3.5406261950476505, + "learning_rate": 4.770132533960797e-08, + "loss": 0.4682, + "step": 22342 + }, + { + "epoch": 0.9219691342741603, + "grad_norm": 13.295749625415606, + "learning_rate": 4.765118304789673e-08, + "loss": 0.4925, + "step": 22343 + }, + { + "epoch": 0.9220103986135182, + "grad_norm": 3.986853198656917, + "learning_rate": 4.7601066698768404e-08, + "loss": 0.5257, + "step": 22344 + }, + { + "epoch": 0.9220516629528761, + "grad_norm": 2.50956807371189, + "learning_rate": 4.7550976293118775e-08, + "loss": 0.526, + "step": 22345 + }, + { + "epoch": 0.922092927292234, + "grad_norm": 2.9753314245889886, + "learning_rate": 4.7500911831841965e-08, + "loss": 0.5095, + "step": 22346 + }, + { + "epoch": 0.9221341916315919, + "grad_norm": 5.659468990467811, + "learning_rate": 4.745087331583242e-08, + "loss": 0.4907, + "step": 22347 + }, + { + "epoch": 0.9221754559709499, + "grad_norm": 2.7683811480157443, + "learning_rate": 4.74008607459841e-08, + "loss": 0.5308, + "step": 22348 + }, + { + "epoch": 0.9222167203103079, + "grad_norm": 4.076720829444363, + "learning_rate": 4.7350874123190114e-08, + "loss": 0.5169, + "step": 22349 + }, + { + "epoch": 0.9222579846496658, + "grad_norm": 8.061508720966737, + "learning_rate": 4.7300913448343754e-08, + "loss": 0.4599, + "step": 22350 + }, + { + "epoch": 0.9222992489890237, + "grad_norm": 4.301420711300041, + "learning_rate": 4.725097872233697e-08, + "loss": 0.4959, + "step": 22351 + }, + { + "epoch": 0.9223405133283816, + "grad_norm": 2.0108707468037537, + "learning_rate": 4.7201069946061884e-08, + "loss": 0.5062, + "step": 22352 + }, + { + "epoch": 0.9223817776677395, + "grad_norm": 3.178098983382817, + "learning_rate": 4.7151187120410114e-08, + "loss": 0.5651, + "step": 22353 + }, + { + "epoch": 0.9224230420070975, + "grad_norm": 2.94451116929991, + "learning_rate": 4.710133024627278e-08, + "loss": 0.5168, + "step": 22354 + }, + { + "epoch": 0.9224643063464554, + "grad_norm": 2.591076895758279, + "learning_rate": 4.7051499324540005e-08, + "loss": 0.4777, + "step": 22355 + }, + { + "epoch": 0.9225055706858133, + "grad_norm": 4.084377598579668, + "learning_rate": 4.700169435610224e-08, + "loss": 0.5521, + "step": 22356 + }, + { + "epoch": 0.9225468350251712, + "grad_norm": 2.8061927263324136, + "learning_rate": 4.6951915341849116e-08, + "loss": 0.5536, + "step": 22357 + }, + { + "epoch": 0.9225880993645291, + "grad_norm": 3.2767249959189315, + "learning_rate": 4.690216228266975e-08, + "loss": 0.5483, + "step": 22358 + }, + { + "epoch": 0.9226293637038872, + "grad_norm": 2.908967398836911, + "learning_rate": 4.685243517945292e-08, + "loss": 0.5042, + "step": 22359 + }, + { + "epoch": 0.9226706280432451, + "grad_norm": 3.811558927727357, + "learning_rate": 4.68027340330866e-08, + "loss": 0.5688, + "step": 22360 + }, + { + "epoch": 0.922711892382603, + "grad_norm": 3.8485293760850823, + "learning_rate": 4.675305884445907e-08, + "loss": 0.5, + "step": 22361 
+ }, + { + "epoch": 0.9227531567219609, + "grad_norm": 3.670445782037349, + "learning_rate": 4.6703409614457294e-08, + "loss": 0.5203, + "step": 22362 + }, + { + "epoch": 0.9227944210613188, + "grad_norm": 7.748456414350707, + "learning_rate": 4.665378634396822e-08, + "loss": 0.5204, + "step": 22363 + }, + { + "epoch": 0.9228356854006767, + "grad_norm": 3.310914255277555, + "learning_rate": 4.6604189033878144e-08, + "loss": 0.5144, + "step": 22364 + }, + { + "epoch": 0.9228769497400346, + "grad_norm": 3.8359274809789863, + "learning_rate": 4.655461768507302e-08, + "loss": 0.5031, + "step": 22365 + }, + { + "epoch": 0.9229182140793926, + "grad_norm": 11.092731551204945, + "learning_rate": 4.650507229843865e-08, + "loss": 0.5228, + "step": 22366 + }, + { + "epoch": 0.9229594784187505, + "grad_norm": 11.922048127174843, + "learning_rate": 4.645555287485964e-08, + "loss": 0.4898, + "step": 22367 + }, + { + "epoch": 0.9230007427581084, + "grad_norm": 2.5907881126233527, + "learning_rate": 4.640605941522047e-08, + "loss": 0.4341, + "step": 22368 + }, + { + "epoch": 0.9230420070974664, + "grad_norm": 3.452588904470092, + "learning_rate": 4.635659192040559e-08, + "loss": 0.4834, + "step": 22369 + }, + { + "epoch": 0.9230832714368243, + "grad_norm": 2.7687581918099533, + "learning_rate": 4.630715039129862e-08, + "loss": 0.5099, + "step": 22370 + }, + { + "epoch": 0.9231245357761823, + "grad_norm": 2.3124569264586037, + "learning_rate": 4.6257734828782204e-08, + "loss": 0.5183, + "step": 22371 + }, + { + "epoch": 0.9231658001155402, + "grad_norm": 2.9216407337003605, + "learning_rate": 4.6208345233739457e-08, + "loss": 0.6071, + "step": 22372 + }, + { + "epoch": 0.9232070644548981, + "grad_norm": 3.4088204874798644, + "learning_rate": 4.6158981607052506e-08, + "loss": 0.5151, + "step": 22373 + }, + { + "epoch": 0.923248328794256, + "grad_norm": 2.7338811930864533, + "learning_rate": 4.610964394960332e-08, + "loss": 0.4487, + "step": 22374 + }, + { + "epoch": 0.9232895931336139, + "grad_norm": 45.17948569717584, + "learning_rate": 4.606033226227285e-08, + "loss": 0.489, + "step": 22375 + }, + { + "epoch": 0.9233308574729718, + "grad_norm": 3.463974522429446, + "learning_rate": 4.6011046545941906e-08, + "loss": 0.5557, + "step": 22376 + }, + { + "epoch": 0.9233721218123297, + "grad_norm": 3.535241016659391, + "learning_rate": 4.596178680149093e-08, + "loss": 0.5469, + "step": 22377 + }, + { + "epoch": 0.9234133861516877, + "grad_norm": 1.9720510947425598, + "learning_rate": 4.591255302980024e-08, + "loss": 0.4846, + "step": 22378 + }, + { + "epoch": 0.9234546504910457, + "grad_norm": 6.774690146220754, + "learning_rate": 4.5863345231748624e-08, + "loss": 0.5208, + "step": 22379 + }, + { + "epoch": 0.9234959148304036, + "grad_norm": 3.1768630194235086, + "learning_rate": 4.581416340821537e-08, + "loss": 0.537, + "step": 22380 + }, + { + "epoch": 0.9235371791697615, + "grad_norm": 2.900766270503093, + "learning_rate": 4.5765007560078956e-08, + "loss": 0.5423, + "step": 22381 + }, + { + "epoch": 0.9235784435091194, + "grad_norm": 3.8739712229863135, + "learning_rate": 4.5715877688217665e-08, + "loss": 0.4846, + "step": 22382 + }, + { + "epoch": 0.9236197078484774, + "grad_norm": 5.887174119790257, + "learning_rate": 4.566677379350864e-08, + "loss": 0.4736, + "step": 22383 + }, + { + "epoch": 0.9236609721878353, + "grad_norm": 2.187826139927145, + "learning_rate": 4.561769587682918e-08, + "loss": 0.5081, + "step": 22384 + }, + { + "epoch": 0.9237022365271932, + "grad_norm": 23.599607770183262, + 
"learning_rate": 4.5568643939056074e-08, + "loss": 0.5671, + "step": 22385 + }, + { + "epoch": 0.9237435008665511, + "grad_norm": 2.3736207800512266, + "learning_rate": 4.551961798106563e-08, + "loss": 0.4941, + "step": 22386 + }, + { + "epoch": 0.923784765205909, + "grad_norm": 2.1439921487032154, + "learning_rate": 4.547061800373315e-08, + "loss": 0.4671, + "step": 22387 + }, + { + "epoch": 0.9238260295452669, + "grad_norm": 2.99397719429742, + "learning_rate": 4.5421644007934085e-08, + "loss": 0.4894, + "step": 22388 + }, + { + "epoch": 0.9238672938846249, + "grad_norm": 2.3327598799760842, + "learning_rate": 4.537269599454324e-08, + "loss": 0.5536, + "step": 22389 + }, + { + "epoch": 0.9239085582239829, + "grad_norm": 4.299728846966288, + "learning_rate": 4.53237739644351e-08, + "loss": 0.5727, + "step": 22390 + }, + { + "epoch": 0.9239498225633408, + "grad_norm": 2.544612304477351, + "learning_rate": 4.527487791848345e-08, + "loss": 0.5228, + "step": 22391 + }, + { + "epoch": 0.9239910869026987, + "grad_norm": 27.923656608447487, + "learning_rate": 4.5226007857561424e-08, + "loss": 0.4404, + "step": 22392 + }, + { + "epoch": 0.9240323512420566, + "grad_norm": 3.59699282523945, + "learning_rate": 4.5177163782542165e-08, + "loss": 0.5457, + "step": 22393 + }, + { + "epoch": 0.9240736155814145, + "grad_norm": 8.627848327860567, + "learning_rate": 4.51283456942983e-08, + "loss": 0.4188, + "step": 22394 + }, + { + "epoch": 0.9241148799207725, + "grad_norm": 8.496151478183513, + "learning_rate": 4.507955359370181e-08, + "loss": 0.5019, + "step": 22395 + }, + { + "epoch": 0.9241561442601304, + "grad_norm": 6.060823977576913, + "learning_rate": 4.503078748162398e-08, + "loss": 0.51, + "step": 22396 + }, + { + "epoch": 0.9241974085994883, + "grad_norm": 3.333428640012705, + "learning_rate": 4.4982047358935966e-08, + "loss": 0.5038, + "step": 22397 + }, + { + "epoch": 0.9242386729388462, + "grad_norm": 2.825415127583678, + "learning_rate": 4.493333322650872e-08, + "loss": 0.4734, + "step": 22398 + }, + { + "epoch": 0.9242799372782041, + "grad_norm": 16.715230437869714, + "learning_rate": 4.488464508521206e-08, + "loss": 0.5224, + "step": 22399 + }, + { + "epoch": 0.9243212016175621, + "grad_norm": 2.3987861191509317, + "learning_rate": 4.4835982935915787e-08, + "loss": 0.4803, + "step": 22400 + }, + { + "epoch": 0.9243624659569201, + "grad_norm": 8.81604970458607, + "learning_rate": 4.4787346779489204e-08, + "loss": 0.5155, + "step": 22401 + }, + { + "epoch": 0.924403730296278, + "grad_norm": 2.816408060409242, + "learning_rate": 4.4738736616800945e-08, + "loss": 0.5013, + "step": 22402 + }, + { + "epoch": 0.9244449946356359, + "grad_norm": 3.3469255728318403, + "learning_rate": 4.4690152448719324e-08, + "loss": 0.5709, + "step": 22403 + }, + { + "epoch": 0.9244862589749938, + "grad_norm": 2.5886053279330805, + "learning_rate": 4.464159427611214e-08, + "loss": 0.507, + "step": 22404 + }, + { + "epoch": 0.9245275233143517, + "grad_norm": 2.6355766328877337, + "learning_rate": 4.459306209984687e-08, + "loss": 0.543, + "step": 22405 + }, + { + "epoch": 0.9245687876537096, + "grad_norm": 2.7465352753930397, + "learning_rate": 4.4544555920790485e-08, + "loss": 0.5207, + "step": 22406 + }, + { + "epoch": 0.9246100519930676, + "grad_norm": 2.202926014086621, + "learning_rate": 4.4496075739809294e-08, + "loss": 0.5335, + "step": 22407 + }, + { + "epoch": 0.9246513163324255, + "grad_norm": 3.025522425241598, + "learning_rate": 4.444762155776927e-08, + "loss": 0.4862, + "step": 22408 + }, + { + "epoch": 
0.9246925806717834, + "grad_norm": 11.361574355512541, + "learning_rate": 4.439919337553589e-08, + "loss": 0.4663, + "step": 22409 + }, + { + "epoch": 0.9247338450111414, + "grad_norm": 3.01218107608149, + "learning_rate": 4.4350791193974285e-08, + "loss": 0.5165, + "step": 22410 + }, + { + "epoch": 0.9247751093504993, + "grad_norm": 2.213993271362648, + "learning_rate": 4.43024150139491e-08, + "loss": 0.5031, + "step": 22411 + }, + { + "epoch": 0.9248163736898573, + "grad_norm": 3.7972346919979696, + "learning_rate": 4.425406483632433e-08, + "loss": 0.5056, + "step": 22412 + }, + { + "epoch": 0.9248576380292152, + "grad_norm": 3.5016119619977437, + "learning_rate": 4.420574066196359e-08, + "loss": 0.5342, + "step": 22413 + }, + { + "epoch": 0.9248989023685731, + "grad_norm": 18.948412877313622, + "learning_rate": 4.4157442491730536e-08, + "loss": 0.5064, + "step": 22414 + }, + { + "epoch": 0.924940166707931, + "grad_norm": 2.163988931422916, + "learning_rate": 4.410917032648731e-08, + "loss": 0.5154, + "step": 22415 + }, + { + "epoch": 0.9249814310472889, + "grad_norm": 1.8675617540320433, + "learning_rate": 4.406092416709623e-08, + "loss": 0.492, + "step": 22416 + }, + { + "epoch": 0.9250226953866468, + "grad_norm": 2.3588641679357187, + "learning_rate": 4.401270401441926e-08, + "loss": 0.5147, + "step": 22417 + }, + { + "epoch": 0.9250639597260047, + "grad_norm": 2.5423275324107792, + "learning_rate": 4.3964509869317884e-08, + "loss": 0.5277, + "step": 22418 + }, + { + "epoch": 0.9251052240653627, + "grad_norm": 4.11861019582384, + "learning_rate": 4.391634173265274e-08, + "loss": 0.5293, + "step": 22419 + }, + { + "epoch": 0.9251464884047207, + "grad_norm": 3.1646284053483704, + "learning_rate": 4.386819960528432e-08, + "loss": 0.485, + "step": 22420 + }, + { + "epoch": 0.9251877527440786, + "grad_norm": 2.566090755788515, + "learning_rate": 4.382008348807243e-08, + "loss": 0.5074, + "step": 22421 + }, + { + "epoch": 0.9252290170834365, + "grad_norm": 21.91157603061709, + "learning_rate": 4.377199338187671e-08, + "loss": 0.4931, + "step": 22422 + }, + { + "epoch": 0.9252702814227944, + "grad_norm": 1.9298149534564266, + "learning_rate": 4.372392928755614e-08, + "loss": 0.4703, + "step": 22423 + }, + { + "epoch": 0.9253115457621524, + "grad_norm": 3.3771872750167122, + "learning_rate": 4.367589120596921e-08, + "loss": 0.5051, + "step": 22424 + }, + { + "epoch": 0.9253528101015103, + "grad_norm": 2.7262363314139813, + "learning_rate": 4.362787913797406e-08, + "loss": 0.5492, + "step": 22425 + }, + { + "epoch": 0.9253940744408682, + "grad_norm": 10.731428870200507, + "learning_rate": 4.357989308442817e-08, + "loss": 0.5199, + "step": 22426 + }, + { + "epoch": 0.9254353387802261, + "grad_norm": 2.4537962717612936, + "learning_rate": 4.353193304618902e-08, + "loss": 0.4739, + "step": 22427 + }, + { + "epoch": 0.925476603119584, + "grad_norm": 2.202466751997664, + "learning_rate": 4.348399902411293e-08, + "loss": 0.4585, + "step": 22428 + }, + { + "epoch": 0.9255178674589419, + "grad_norm": 3.0104890251327268, + "learning_rate": 4.3436091019056215e-08, + "loss": 0.5858, + "step": 22429 + }, + { + "epoch": 0.9255591317983, + "grad_norm": 2.4488393048869415, + "learning_rate": 4.338820903187468e-08, + "loss": 0.4894, + "step": 22430 + }, + { + "epoch": 0.9256003961376579, + "grad_norm": 2.082686020973937, + "learning_rate": 4.3340353063423656e-08, + "loss": 0.5368, + "step": 22431 + }, + { + "epoch": 0.9256416604770158, + "grad_norm": 3.348233456626666, + "learning_rate": 
4.3292523114558115e-08, + "loss": 0.4776, + "step": 22432 + }, + { + "epoch": 0.9256829248163737, + "grad_norm": 3.915385398515052, + "learning_rate": 4.324471918613188e-08, + "loss": 0.5298, + "step": 22433 + }, + { + "epoch": 0.9257241891557316, + "grad_norm": 3.193892395001696, + "learning_rate": 4.319694127899942e-08, + "loss": 0.4924, + "step": 22434 + }, + { + "epoch": 0.9257654534950895, + "grad_norm": 6.115013227051063, + "learning_rate": 4.314918939401391e-08, + "loss": 0.5334, + "step": 22435 + }, + { + "epoch": 0.9258067178344475, + "grad_norm": 5.1903879726228315, + "learning_rate": 4.310146353202832e-08, + "loss": 0.5257, + "step": 22436 + }, + { + "epoch": 0.9258479821738054, + "grad_norm": 2.5592146264678353, + "learning_rate": 4.3053763693895297e-08, + "loss": 0.5496, + "step": 22437 + }, + { + "epoch": 0.9258892465131633, + "grad_norm": 2.5212908680254533, + "learning_rate": 4.300608988046667e-08, + "loss": 0.5058, + "step": 22438 + }, + { + "epoch": 0.9259305108525212, + "grad_norm": 2.6138773407345113, + "learning_rate": 4.2958442092594256e-08, + "loss": 0.4569, + "step": 22439 + }, + { + "epoch": 0.9259717751918792, + "grad_norm": 3.312168945088158, + "learning_rate": 4.291082033112903e-08, + "loss": 0.4885, + "step": 22440 + }, + { + "epoch": 0.9260130395312371, + "grad_norm": 2.8042313527560507, + "learning_rate": 4.286322459692149e-08, + "loss": 0.5071, + "step": 22441 + }, + { + "epoch": 0.9260543038705951, + "grad_norm": 3.8154279763832966, + "learning_rate": 4.2815654890821957e-08, + "loss": 0.4639, + "step": 22442 + }, + { + "epoch": 0.926095568209953, + "grad_norm": 8.327956495273288, + "learning_rate": 4.276811121368007e-08, + "loss": 0.5365, + "step": 22443 + }, + { + "epoch": 0.9261368325493109, + "grad_norm": 3.974339157851314, + "learning_rate": 4.27205935663455e-08, + "loss": 0.5444, + "step": 22444 + }, + { + "epoch": 0.9261780968886688, + "grad_norm": 2.883088467941055, + "learning_rate": 4.267310194966639e-08, + "loss": 0.4725, + "step": 22445 + }, + { + "epoch": 0.9262193612280267, + "grad_norm": 5.468701109257351, + "learning_rate": 4.262563636449124e-08, + "loss": 0.4925, + "step": 22446 + }, + { + "epoch": 0.9262606255673846, + "grad_norm": 4.279336887769954, + "learning_rate": 4.257819681166819e-08, + "loss": 0.431, + "step": 22447 + }, + { + "epoch": 0.9263018899067426, + "grad_norm": 3.046057531453924, + "learning_rate": 4.253078329204457e-08, + "loss": 0.5536, + "step": 22448 + }, + { + "epoch": 0.9263431542461005, + "grad_norm": 6.7388545317614055, + "learning_rate": 4.248339580646704e-08, + "loss": 0.4798, + "step": 22449 + }, + { + "epoch": 0.9263844185854585, + "grad_norm": 3.182480573628393, + "learning_rate": 4.243603435578225e-08, + "loss": 0.5183, + "step": 22450 + }, + { + "epoch": 0.9264256829248164, + "grad_norm": 2.154748712335729, + "learning_rate": 4.238869894083602e-08, + "loss": 0.5195, + "step": 22451 + }, + { + "epoch": 0.9264669472641743, + "grad_norm": 3.9478331493742775, + "learning_rate": 4.2341389562474356e-08, + "loss": 0.5214, + "step": 22452 + }, + { + "epoch": 0.9265082116035323, + "grad_norm": 4.679645902155493, + "learning_rate": 4.229410622154189e-08, + "loss": 0.5056, + "step": 22453 + }, + { + "epoch": 0.9265494759428902, + "grad_norm": 4.341020398550843, + "learning_rate": 4.224684891888314e-08, + "loss": 0.5018, + "step": 22454 + }, + { + "epoch": 0.9265907402822481, + "grad_norm": 5.240329033868037, + "learning_rate": 4.219961765534242e-08, + "loss": 0.5462, + "step": 22455 + }, + { + "epoch": 
0.926632004621606, + "grad_norm": 2.8378495835310296, + "learning_rate": 4.2152412431763376e-08, + "loss": 0.4763, + "step": 22456 + }, + { + "epoch": 0.9266732689609639, + "grad_norm": 5.071726630502819, + "learning_rate": 4.210523324898935e-08, + "loss": 0.4799, + "step": 22457 + }, + { + "epoch": 0.9267145333003218, + "grad_norm": 4.348032504824051, + "learning_rate": 4.205808010786283e-08, + "loss": 0.5101, + "step": 22458 + }, + { + "epoch": 0.9267557976396797, + "grad_norm": 3.1620044529014413, + "learning_rate": 4.201095300922614e-08, + "loss": 0.5115, + "step": 22459 + }, + { + "epoch": 0.9267970619790377, + "grad_norm": 3.272532064175427, + "learning_rate": 4.196385195392144e-08, + "loss": 0.5041, + "step": 22460 + }, + { + "epoch": 0.9268383263183957, + "grad_norm": 1.9578960598737705, + "learning_rate": 4.191677694278956e-08, + "loss": 0.5007, + "step": 22461 + }, + { + "epoch": 0.9268795906577536, + "grad_norm": 2.597454646456293, + "learning_rate": 4.186972797667149e-08, + "loss": 0.5664, + "step": 22462 + }, + { + "epoch": 0.9269208549971115, + "grad_norm": 6.446479237335273, + "learning_rate": 4.182270505640789e-08, + "loss": 0.5094, + "step": 22463 + }, + { + "epoch": 0.9269621193364694, + "grad_norm": 2.9027122722073995, + "learning_rate": 4.177570818283877e-08, + "loss": 0.4917, + "step": 22464 + }, + { + "epoch": 0.9270033836758274, + "grad_norm": 3.8486078105244186, + "learning_rate": 4.1728737356803273e-08, + "loss": 0.567, + "step": 22465 + }, + { + "epoch": 0.9270446480151853, + "grad_norm": 2.6304420746737875, + "learning_rate": 4.1681792579140734e-08, + "loss": 0.4409, + "step": 22466 + }, + { + "epoch": 0.9270859123545432, + "grad_norm": 2.43332782733221, + "learning_rate": 4.1634873850689315e-08, + "loss": 0.4537, + "step": 22467 + }, + { + "epoch": 0.9271271766939011, + "grad_norm": 2.3673322136754646, + "learning_rate": 4.158798117228735e-08, + "loss": 0.513, + "step": 22468 + }, + { + "epoch": 0.927168441033259, + "grad_norm": 5.4422146681355885, + "learning_rate": 4.1541114544772494e-08, + "loss": 0.5071, + "step": 22469 + }, + { + "epoch": 0.9272097053726169, + "grad_norm": 9.401850797201815, + "learning_rate": 4.1494273968981755e-08, + "loss": 0.5271, + "step": 22470 + }, + { + "epoch": 0.927250969711975, + "grad_norm": 2.5876730214391768, + "learning_rate": 4.144745944575195e-08, + "loss": 0.5851, + "step": 22471 + }, + { + "epoch": 0.9272922340513329, + "grad_norm": 4.456694445642757, + "learning_rate": 4.14006709759191e-08, + "loss": 0.5457, + "step": 22472 + }, + { + "epoch": 0.9273334983906908, + "grad_norm": 3.2164529158204362, + "learning_rate": 4.135390856031934e-08, + "loss": 0.5026, + "step": 22473 + }, + { + "epoch": 0.9273747627300487, + "grad_norm": 2.4903375289408958, + "learning_rate": 4.130717219978769e-08, + "loss": 0.5659, + "step": 22474 + }, + { + "epoch": 0.9274160270694066, + "grad_norm": 2.866460181515897, + "learning_rate": 4.1260461895158975e-08, + "loss": 0.5046, + "step": 22475 + }, + { + "epoch": 0.9274572914087645, + "grad_norm": 11.86692604078778, + "learning_rate": 4.1213777647267856e-08, + "loss": 0.4969, + "step": 22476 + }, + { + "epoch": 0.9274985557481225, + "grad_norm": 9.264516473326053, + "learning_rate": 4.1167119456947675e-08, + "loss": 0.5139, + "step": 22477 + }, + { + "epoch": 0.9275398200874804, + "grad_norm": 1.9111359531655614, + "learning_rate": 4.1120487325032264e-08, + "loss": 0.5228, + "step": 22478 + }, + { + "epoch": 0.9275810844268383, + "grad_norm": 7.290989355804162, + "learning_rate": 
4.1073881252354785e-08, + "loss": 0.5277, + "step": 22479 + }, + { + "epoch": 0.9276223487661962, + "grad_norm": 2.986677666022168, + "learning_rate": 4.102730123974707e-08, + "loss": 0.5089, + "step": 22480 + }, + { + "epoch": 0.9276636131055542, + "grad_norm": 5.396398084947146, + "learning_rate": 4.0980747288041796e-08, + "loss": 0.5554, + "step": 22481 + }, + { + "epoch": 0.9277048774449121, + "grad_norm": 2.852459746661441, + "learning_rate": 4.0934219398070295e-08, + "loss": 0.5033, + "step": 22482 + }, + { + "epoch": 0.9277461417842701, + "grad_norm": 3.731654729745415, + "learning_rate": 4.0887717570663396e-08, + "loss": 0.5837, + "step": 22483 + }, + { + "epoch": 0.927787406123628, + "grad_norm": 2.189232144159504, + "learning_rate": 4.084124180665211e-08, + "loss": 0.5328, + "step": 22484 + }, + { + "epoch": 0.9278286704629859, + "grad_norm": 12.319911943705442, + "learning_rate": 4.079479210686676e-08, + "loss": 0.5518, + "step": 22485 + }, + { + "epoch": 0.9278699348023438, + "grad_norm": 4.842623034400897, + "learning_rate": 4.0748368472136536e-08, + "loss": 0.5375, + "step": 22486 + }, + { + "epoch": 0.9279111991417017, + "grad_norm": 5.719114379578967, + "learning_rate": 4.0701970903290917e-08, + "loss": 0.4818, + "step": 22487 + }, + { + "epoch": 0.9279524634810596, + "grad_norm": 5.3122043516466375, + "learning_rate": 4.065559940115876e-08, + "loss": 0.5955, + "step": 22488 + }, + { + "epoch": 0.9279937278204176, + "grad_norm": 5.934410160004451, + "learning_rate": 4.0609253966568395e-08, + "loss": 0.4398, + "step": 22489 + }, + { + "epoch": 0.9280349921597755, + "grad_norm": 2.674163010665878, + "learning_rate": 4.056293460034749e-08, + "loss": 0.5706, + "step": 22490 + }, + { + "epoch": 0.9280762564991335, + "grad_norm": 2.8936965778851995, + "learning_rate": 4.0516641303323557e-08, + "loss": 0.5607, + "step": 22491 + }, + { + "epoch": 0.9281175208384914, + "grad_norm": 2.9574333408674165, + "learning_rate": 4.0470374076323424e-08, + "loss": 0.5031, + "step": 22492 + }, + { + "epoch": 0.9281587851778493, + "grad_norm": 14.502470000965072, + "learning_rate": 4.042413292017394e-08, + "loss": 0.5284, + "step": 22493 + }, + { + "epoch": 0.9282000495172072, + "grad_norm": 6.265631198495808, + "learning_rate": 4.0377917835700437e-08, + "loss": 0.5792, + "step": 22494 + }, + { + "epoch": 0.9282413138565652, + "grad_norm": 2.4382154886829164, + "learning_rate": 4.0331728823728595e-08, + "loss": 0.5032, + "step": 22495 + }, + { + "epoch": 0.9282825781959231, + "grad_norm": 5.599896069223556, + "learning_rate": 4.028556588508375e-08, + "loss": 0.5131, + "step": 22496 + }, + { + "epoch": 0.928323842535281, + "grad_norm": 4.185804492046591, + "learning_rate": 4.023942902059058e-08, + "loss": 0.5061, + "step": 22497 + }, + { + "epoch": 0.9283651068746389, + "grad_norm": 2.018367961843524, + "learning_rate": 4.019331823107258e-08, + "loss": 0.5261, + "step": 22498 + }, + { + "epoch": 0.9284063712139968, + "grad_norm": 2.2372283505524413, + "learning_rate": 4.014723351735394e-08, + "loss": 0.5554, + "step": 22499 + }, + { + "epoch": 0.9284476355533547, + "grad_norm": 2.992871768340897, + "learning_rate": 4.0101174880257827e-08, + "loss": 0.5636, + "step": 22500 + }, + { + "epoch": 0.9284888998927128, + "grad_norm": 4.379151903629773, + "learning_rate": 4.005514232060675e-08, + "loss": 0.5447, + "step": 22501 + }, + { + "epoch": 0.9285301642320707, + "grad_norm": 3.1334740420351346, + "learning_rate": 4.0009135839223056e-08, + "loss": 0.4861, + "step": 22502 + }, + { + "epoch": 
0.9285714285714286, + "grad_norm": 2.676832721436672, + "learning_rate": 3.996315543692841e-08, + "loss": 0.4837, + "step": 22503 + }, + { + "epoch": 0.9286126929107865, + "grad_norm": 3.2230656466390566, + "learning_rate": 3.991720111454433e-08, + "loss": 0.534, + "step": 22504 + }, + { + "epoch": 0.9286539572501444, + "grad_norm": 3.879831690637579, + "learning_rate": 3.987127287289183e-08, + "loss": 0.5061, + "step": 22505 + }, + { + "epoch": 0.9286952215895024, + "grad_norm": 2.2603509486555473, + "learning_rate": 3.982537071279091e-08, + "loss": 0.5013, + "step": 22506 + }, + { + "epoch": 0.9287364859288603, + "grad_norm": 8.491694150471215, + "learning_rate": 3.9779494635061755e-08, + "loss": 0.5448, + "step": 22507 + }, + { + "epoch": 0.9287777502682182, + "grad_norm": 2.3074772644484796, + "learning_rate": 3.973364464052354e-08, + "loss": 0.5185, + "step": 22508 + }, + { + "epoch": 0.9288190146075761, + "grad_norm": 4.72887258140137, + "learning_rate": 3.968782072999544e-08, + "loss": 0.5784, + "step": 22509 + }, + { + "epoch": 0.928860278946934, + "grad_norm": 4.428267910556946, + "learning_rate": 3.9642022904296304e-08, + "loss": 0.49, + "step": 22510 + }, + { + "epoch": 0.928901543286292, + "grad_norm": 3.6781205022011725, + "learning_rate": 3.959625116424365e-08, + "loss": 0.4769, + "step": 22511 + }, + { + "epoch": 0.92894280762565, + "grad_norm": 3.381370345860256, + "learning_rate": 3.955050551065531e-08, + "loss": 0.4813, + "step": 22512 + }, + { + "epoch": 0.9289840719650079, + "grad_norm": 2.1706867502209874, + "learning_rate": 3.950478594434847e-08, + "loss": 0.4673, + "step": 22513 + }, + { + "epoch": 0.9290253363043658, + "grad_norm": 2.0201786241614204, + "learning_rate": 3.9459092466139826e-08, + "loss": 0.4929, + "step": 22514 + }, + { + "epoch": 0.9290666006437237, + "grad_norm": 6.104156111353912, + "learning_rate": 3.941342507684553e-08, + "loss": 0.5093, + "step": 22515 + }, + { + "epoch": 0.9291078649830816, + "grad_norm": 2.7982540788035895, + "learning_rate": 3.936778377728112e-08, + "loss": 0.4979, + "step": 22516 + }, + { + "epoch": 0.9291491293224395, + "grad_norm": 4.160235347260332, + "learning_rate": 3.932216856826226e-08, + "loss": 0.5243, + "step": 22517 + }, + { + "epoch": 0.9291903936617975, + "grad_norm": 2.3776255275856673, + "learning_rate": 3.927657945060331e-08, + "loss": 0.4437, + "step": 22518 + }, + { + "epoch": 0.9292316580011554, + "grad_norm": 3.48790739456892, + "learning_rate": 3.9231016425119114e-08, + "loss": 0.5272, + "step": 22519 + }, + { + "epoch": 0.9292729223405133, + "grad_norm": 3.602709709643291, + "learning_rate": 3.9185479492622855e-08, + "loss": 0.5325, + "step": 22520 + }, + { + "epoch": 0.9293141866798712, + "grad_norm": 9.003422182736168, + "learning_rate": 3.913996865392855e-08, + "loss": 0.4578, + "step": 22521 + }, + { + "epoch": 0.9293554510192292, + "grad_norm": 2.059711212548721, + "learning_rate": 3.909448390984904e-08, + "loss": 0.4734, + "step": 22522 + }, + { + "epoch": 0.9293967153585871, + "grad_norm": 2.5139508556997012, + "learning_rate": 3.904902526119636e-08, + "loss": 0.5285, + "step": 22523 + }, + { + "epoch": 0.9294379796979451, + "grad_norm": 2.2335334848448003, + "learning_rate": 3.900359270878301e-08, + "loss": 0.531, + "step": 22524 + }, + { + "epoch": 0.929479244037303, + "grad_norm": 3.603030537581522, + "learning_rate": 3.895818625342035e-08, + "loss": 0.518, + "step": 22525 + }, + { + "epoch": 0.9295205083766609, + "grad_norm": 2.1110819582234193, + "learning_rate": 3.891280589591956e-08, + 
"loss": 0.5121, + "step": 22526 + }, + { + "epoch": 0.9295617727160188, + "grad_norm": 7.50675751277981, + "learning_rate": 3.886745163709082e-08, + "loss": 0.4839, + "step": 22527 + }, + { + "epoch": 0.9296030370553767, + "grad_norm": 3.9432497313490633, + "learning_rate": 3.882212347774483e-08, + "loss": 0.4946, + "step": 22528 + }, + { + "epoch": 0.9296443013947346, + "grad_norm": 2.4386168611658983, + "learning_rate": 3.877682141869093e-08, + "loss": 0.493, + "step": 22529 + }, + { + "epoch": 0.9296855657340926, + "grad_norm": 4.132639082617912, + "learning_rate": 3.873154546073848e-08, + "loss": 0.5029, + "step": 22530 + }, + { + "epoch": 0.9297268300734505, + "grad_norm": 3.5470844987971577, + "learning_rate": 3.8686295604696164e-08, + "loss": 0.4884, + "step": 22531 + }, + { + "epoch": 0.9297680944128085, + "grad_norm": 2.595296397315847, + "learning_rate": 3.864107185137233e-08, + "loss": 0.5618, + "step": 22532 + }, + { + "epoch": 0.9298093587521664, + "grad_norm": 2.2780487571315877, + "learning_rate": 3.859587420157451e-08, + "loss": 0.55, + "step": 22533 + }, + { + "epoch": 0.9298506230915243, + "grad_norm": 2.0369468533619153, + "learning_rate": 3.8550702656110546e-08, + "loss": 0.5326, + "step": 22534 + }, + { + "epoch": 0.9298918874308822, + "grad_norm": 1.771737526263533, + "learning_rate": 3.8505557215786635e-08, + "loss": 0.4472, + "step": 22535 + }, + { + "epoch": 0.9299331517702402, + "grad_norm": 3.7047052990120695, + "learning_rate": 3.846043788140979e-08, + "loss": 0.5262, + "step": 22536 + }, + { + "epoch": 0.9299744161095981, + "grad_norm": 8.657467243993517, + "learning_rate": 3.841534465378571e-08, + "loss": 0.5144, + "step": 22537 + }, + { + "epoch": 0.930015680448956, + "grad_norm": 3.6294908767114062, + "learning_rate": 3.837027753371991e-08, + "loss": 0.5101, + "step": 22538 + }, + { + "epoch": 0.9300569447883139, + "grad_norm": 5.92375125498658, + "learning_rate": 3.8325236522017415e-08, + "loss": 0.5204, + "step": 22539 + }, + { + "epoch": 0.9300982091276718, + "grad_norm": 2.5122194368315904, + "learning_rate": 3.828022161948258e-08, + "loss": 0.5516, + "step": 22540 + }, + { + "epoch": 0.9301394734670297, + "grad_norm": 4.72646013113794, + "learning_rate": 3.8235232826919765e-08, + "loss": 0.4648, + "step": 22541 + }, + { + "epoch": 0.9301807378063878, + "grad_norm": 1.955593029170522, + "learning_rate": 3.8190270145132657e-08, + "loss": 0.3959, + "step": 22542 + }, + { + "epoch": 0.9302220021457457, + "grad_norm": 3.163587247174016, + "learning_rate": 3.814533357492395e-08, + "loss": 0.4424, + "step": 22543 + }, + { + "epoch": 0.9302632664851036, + "grad_norm": 3.209388068106242, + "learning_rate": 3.810042311709666e-08, + "loss": 0.4871, + "step": 22544 + }, + { + "epoch": 0.9303045308244615, + "grad_norm": 12.148510038153631, + "learning_rate": 3.805553877245299e-08, + "loss": 0.4853, + "step": 22545 + }, + { + "epoch": 0.9303457951638194, + "grad_norm": 2.7515313182725016, + "learning_rate": 3.801068054179446e-08, + "loss": 0.4985, + "step": 22546 + }, + { + "epoch": 0.9303870595031773, + "grad_norm": 4.833731746740909, + "learning_rate": 3.7965848425922764e-08, + "loss": 0.4944, + "step": 22547 + }, + { + "epoch": 0.9304283238425353, + "grad_norm": 30.02882185689982, + "learning_rate": 3.7921042425638264e-08, + "loss": 0.5394, + "step": 22548 + }, + { + "epoch": 0.9304695881818932, + "grad_norm": 4.349254764159676, + "learning_rate": 3.787626254174148e-08, + "loss": 0.5364, + "step": 22549 + }, + { + "epoch": 0.9305108525212511, + "grad_norm": 
3.006128749296885, + "learning_rate": 3.783150877503244e-08, + "loss": 0.5824, + "step": 22550 + }, + { + "epoch": 0.930552116860609, + "grad_norm": 3.3253881783990327, + "learning_rate": 3.778678112631051e-08, + "loss": 0.4755, + "step": 22551 + }, + { + "epoch": 0.930593381199967, + "grad_norm": 4.736386500803624, + "learning_rate": 3.774207959637438e-08, + "loss": 0.4471, + "step": 22552 + }, + { + "epoch": 0.930634645539325, + "grad_norm": 5.212655208409607, + "learning_rate": 3.769740418602274e-08, + "loss": 0.4919, + "step": 22553 + }, + { + "epoch": 0.9306759098786829, + "grad_norm": 2.882593353541471, + "learning_rate": 3.7652754896053795e-08, + "loss": 0.477, + "step": 22554 + }, + { + "epoch": 0.9307171742180408, + "grad_norm": 2.283374616199571, + "learning_rate": 3.7608131727264573e-08, + "loss": 0.4565, + "step": 22555 + }, + { + "epoch": 0.9307584385573987, + "grad_norm": 2.7455935821344335, + "learning_rate": 3.756353468045259e-08, + "loss": 0.5201, + "step": 22556 + }, + { + "epoch": 0.9307997028967566, + "grad_norm": 4.913723222494771, + "learning_rate": 3.7518963756414396e-08, + "loss": 0.5268, + "step": 22557 + }, + { + "epoch": 0.9308409672361145, + "grad_norm": 2.6331918422673186, + "learning_rate": 3.747441895594617e-08, + "loss": 0.5565, + "step": 22558 + }, + { + "epoch": 0.9308822315754725, + "grad_norm": 2.954779229744207, + "learning_rate": 3.7429900279843446e-08, + "loss": 0.5332, + "step": 22559 + }, + { + "epoch": 0.9309234959148304, + "grad_norm": 3.121682947785422, + "learning_rate": 3.7385407728901253e-08, + "loss": 0.5226, + "step": 22560 + }, + { + "epoch": 0.9309647602541883, + "grad_norm": 3.275720717644181, + "learning_rate": 3.73409413039148e-08, + "loss": 0.4717, + "step": 22561 + }, + { + "epoch": 0.9310060245935463, + "grad_norm": 3.700636423396278, + "learning_rate": 3.729650100567794e-08, + "loss": 0.5231, + "step": 22562 + }, + { + "epoch": 0.9310472889329042, + "grad_norm": 2.459959772279771, + "learning_rate": 3.7252086834985044e-08, + "loss": 0.5801, + "step": 22563 + }, + { + "epoch": 0.9310885532722621, + "grad_norm": 2.488687355208705, + "learning_rate": 3.720769879262881e-08, + "loss": 0.5009, + "step": 22564 + }, + { + "epoch": 0.9311298176116201, + "grad_norm": 3.284489365404418, + "learning_rate": 3.716333687940243e-08, + "loss": 0.509, + "step": 22565 + }, + { + "epoch": 0.931171081950978, + "grad_norm": 3.506553190206086, + "learning_rate": 3.711900109609845e-08, + "loss": 0.5392, + "step": 22566 + }, + { + "epoch": 0.9312123462903359, + "grad_norm": 3.02914045405124, + "learning_rate": 3.7074691443508724e-08, + "loss": 0.5154, + "step": 22567 + }, + { + "epoch": 0.9312536106296938, + "grad_norm": 5.361438381487746, + "learning_rate": 3.703040792242446e-08, + "loss": 0.5342, + "step": 22568 + }, + { + "epoch": 0.9312948749690517, + "grad_norm": 2.585254893907198, + "learning_rate": 3.698615053363702e-08, + "loss": 0.5196, + "step": 22569 + }, + { + "epoch": 0.9313361393084096, + "grad_norm": 4.2259166375215536, + "learning_rate": 3.6941919277936774e-08, + "loss": 0.5568, + "step": 22570 + }, + { + "epoch": 0.9313774036477676, + "grad_norm": 3.9239292573326328, + "learning_rate": 3.689771415611426e-08, + "loss": 0.5056, + "step": 22571 + }, + { + "epoch": 0.9314186679871256, + "grad_norm": 3.273339504747641, + "learning_rate": 3.685353516895834e-08, + "loss": 0.5516, + "step": 22572 + }, + { + "epoch": 0.9314599323264835, + "grad_norm": 2.440507603926783, + "learning_rate": 3.6809382317258386e-08, + "loss": 0.4381, + "step": 22573 + 
}, + { + "epoch": 0.9315011966658414, + "grad_norm": 8.34583731267182, + "learning_rate": 3.676525560180344e-08, + "loss": 0.5653, + "step": 22574 + }, + { + "epoch": 0.9315424610051993, + "grad_norm": 3.8050432807104633, + "learning_rate": 3.6721155023381525e-08, + "loss": 0.5183, + "step": 22575 + }, + { + "epoch": 0.9315837253445572, + "grad_norm": 3.6963099047358714, + "learning_rate": 3.6677080582780185e-08, + "loss": 0.4909, + "step": 22576 + }, + { + "epoch": 0.9316249896839152, + "grad_norm": 2.577348454570518, + "learning_rate": 3.6633032280786795e-08, + "loss": 0.5139, + "step": 22577 + }, + { + "epoch": 0.9316662540232731, + "grad_norm": 4.579929023547315, + "learning_rate": 3.6589010118188215e-08, + "loss": 0.5318, + "step": 22578 + }, + { + "epoch": 0.931707518362631, + "grad_norm": 4.932591942475185, + "learning_rate": 3.6545014095771155e-08, + "loss": 0.5281, + "step": 22579 + }, + { + "epoch": 0.9317487827019889, + "grad_norm": 8.909113421927337, + "learning_rate": 3.650104421432082e-08, + "loss": 0.4933, + "step": 22580 + }, + { + "epoch": 0.9317900470413468, + "grad_norm": 6.865064642074581, + "learning_rate": 3.645710047462292e-08, + "loss": 0.5328, + "step": 22581 + }, + { + "epoch": 0.9318313113807047, + "grad_norm": 2.682228914726381, + "learning_rate": 3.641318287746248e-08, + "loss": 0.5451, + "step": 22582 + }, + { + "epoch": 0.9318725757200628, + "grad_norm": 2.1326951618213403, + "learning_rate": 3.6369291423624215e-08, + "loss": 0.452, + "step": 22583 + }, + { + "epoch": 0.9319138400594207, + "grad_norm": 2.290482656977659, + "learning_rate": 3.63254261138915e-08, + "loss": 0.4804, + "step": 22584 + }, + { + "epoch": 0.9319551043987786, + "grad_norm": 2.030567296481979, + "learning_rate": 3.628158694904854e-08, + "loss": 0.5027, + "step": 22585 + }, + { + "epoch": 0.9319963687381365, + "grad_norm": 5.350037239544948, + "learning_rate": 3.623777392987787e-08, + "loss": 0.5175, + "step": 22586 + }, + { + "epoch": 0.9320376330774944, + "grad_norm": 2.7992205347552392, + "learning_rate": 3.619398705716237e-08, + "loss": 0.5301, + "step": 22587 + }, + { + "epoch": 0.9320788974168523, + "grad_norm": 13.943427324246876, + "learning_rate": 3.615022633168425e-08, + "loss": 0.5094, + "step": 22588 + }, + { + "epoch": 0.9321201617562103, + "grad_norm": 3.4871318842060033, + "learning_rate": 3.610649175422487e-08, + "loss": 0.5614, + "step": 22589 + }, + { + "epoch": 0.9321614260955682, + "grad_norm": 9.55079925499337, + "learning_rate": 3.6062783325565786e-08, + "loss": 0.4699, + "step": 22590 + }, + { + "epoch": 0.9322026904349261, + "grad_norm": 3.9899498989650075, + "learning_rate": 3.601910104648753e-08, + "loss": 0.554, + "step": 22591 + }, + { + "epoch": 0.932243954774284, + "grad_norm": 2.4083529487357263, + "learning_rate": 3.5975444917770494e-08, + "loss": 0.5206, + "step": 22592 + }, + { + "epoch": 0.932285219113642, + "grad_norm": 3.790497840668169, + "learning_rate": 3.593181494019454e-08, + "loss": 0.5253, + "step": 22593 + }, + { + "epoch": 0.932326483453, + "grad_norm": 5.4773261304506695, + "learning_rate": 3.5888211114538715e-08, + "loss": 0.5253, + "step": 22594 + }, + { + "epoch": 0.9323677477923579, + "grad_norm": 2.630124152608968, + "learning_rate": 3.584463344158223e-08, + "loss": 0.4922, + "step": 22595 + }, + { + "epoch": 0.9324090121317158, + "grad_norm": 2.2670151614227336, + "learning_rate": 3.5801081922103294e-08, + "loss": 0.527, + "step": 22596 + }, + { + "epoch": 0.9324502764710737, + "grad_norm": 2.380040277353145, + "learning_rate": 
3.575755655687979e-08, + "loss": 0.4995, + "step": 22597 + }, + { + "epoch": 0.9324915408104316, + "grad_norm": 2.1179788752578785, + "learning_rate": 3.5714057346689586e-08, + "loss": 0.5048, + "step": 22598 + }, + { + "epoch": 0.9325328051497895, + "grad_norm": 2.6603053929080405, + "learning_rate": 3.567058429230924e-08, + "loss": 0.5674, + "step": 22599 + }, + { + "epoch": 0.9325740694891475, + "grad_norm": 7.177169421721219, + "learning_rate": 3.5627137394515455e-08, + "loss": 0.5518, + "step": 22600 + }, + { + "epoch": 0.9326153338285054, + "grad_norm": 3.3053285726639925, + "learning_rate": 3.558371665408427e-08, + "loss": 0.5597, + "step": 22601 + }, + { + "epoch": 0.9326565981678633, + "grad_norm": 2.447178432711528, + "learning_rate": 3.554032207179125e-08, + "loss": 0.4947, + "step": 22602 + }, + { + "epoch": 0.9326978625072213, + "grad_norm": 3.690632054739074, + "learning_rate": 3.54969536484116e-08, + "loss": 0.5236, + "step": 22603 + }, + { + "epoch": 0.9327391268465792, + "grad_norm": 3.2162519557009634, + "learning_rate": 3.54536113847202e-08, + "loss": 0.44, + "step": 22604 + }, + { + "epoch": 0.9327803911859371, + "grad_norm": 13.837775385690305, + "learning_rate": 3.5410295281490756e-08, + "loss": 0.5227, + "step": 22605 + }, + { + "epoch": 0.9328216555252951, + "grad_norm": 2.6655844748832545, + "learning_rate": 3.5367005339497495e-08, + "loss": 0.5668, + "step": 22606 + }, + { + "epoch": 0.932862919864653, + "grad_norm": 7.336402670295332, + "learning_rate": 3.532374155951329e-08, + "loss": 0.5091, + "step": 22607 + }, + { + "epoch": 0.9329041842040109, + "grad_norm": 2.793756252832332, + "learning_rate": 3.528050394231136e-08, + "loss": 0.509, + "step": 22608 + }, + { + "epoch": 0.9329454485433688, + "grad_norm": 3.8009002085491272, + "learning_rate": 3.5237292488663584e-08, + "loss": 0.5316, + "step": 22609 + }, + { + "epoch": 0.9329867128827267, + "grad_norm": 13.863696334458357, + "learning_rate": 3.519410719934202e-08, + "loss": 0.4814, + "step": 22610 + }, + { + "epoch": 0.9330279772220846, + "grad_norm": 3.855854821969595, + "learning_rate": 3.515094807511821e-08, + "loss": 0.526, + "step": 22611 + }, + { + "epoch": 0.9330692415614426, + "grad_norm": 2.768598991696697, + "learning_rate": 3.510781511676303e-08, + "loss": 0.5109, + "step": 22612 + }, + { + "epoch": 0.9331105059008006, + "grad_norm": 9.983778005770608, + "learning_rate": 3.506470832504671e-08, + "loss": 0.5172, + "step": 22613 + }, + { + "epoch": 0.9331517702401585, + "grad_norm": 3.4397502683965526, + "learning_rate": 3.502162770073947e-08, + "loss": 0.4468, + "step": 22614 + }, + { + "epoch": 0.9331930345795164, + "grad_norm": 7.324217363280943, + "learning_rate": 3.497857324461068e-08, + "loss": 0.4988, + "step": 22615 + }, + { + "epoch": 0.9332342989188743, + "grad_norm": 3.073458977710386, + "learning_rate": 3.493554495742973e-08, + "loss": 0.5255, + "step": 22616 + }, + { + "epoch": 0.9332755632582322, + "grad_norm": 2.4213048689075816, + "learning_rate": 3.489254283996468e-08, + "loss": 0.5615, + "step": 22617 + }, + { + "epoch": 0.9333168275975902, + "grad_norm": 3.170207786279862, + "learning_rate": 3.484956689298407e-08, + "loss": 0.5005, + "step": 22618 + }, + { + "epoch": 0.9333580919369481, + "grad_norm": 7.033400423304286, + "learning_rate": 3.480661711725547e-08, + "loss": 0.4932, + "step": 22619 + }, + { + "epoch": 0.933399356276306, + "grad_norm": 5.793476024515487, + "learning_rate": 3.476369351354608e-08, + "loss": 0.5109, + "step": 22620 + }, + { + "epoch": 0.9334406206156639, 
+ "grad_norm": 2.6484622205321693, + "learning_rate": 3.4720796082622464e-08, + "loss": 0.5225, + "step": 22621 + }, + { + "epoch": 0.9334818849550218, + "grad_norm": 2.1893194030475165, + "learning_rate": 3.4677924825251174e-08, + "loss": 0.4815, + "step": 22622 + }, + { + "epoch": 0.9335231492943799, + "grad_norm": 2.766241735291315, + "learning_rate": 3.463507974219759e-08, + "loss": 0.4899, + "step": 22623 + }, + { + "epoch": 0.9335644136337378, + "grad_norm": 32.377375123080164, + "learning_rate": 3.459226083422745e-08, + "loss": 0.511, + "step": 22624 + }, + { + "epoch": 0.9336056779730957, + "grad_norm": 2.7464906710283823, + "learning_rate": 3.4549468102105465e-08, + "loss": 0.5191, + "step": 22625 + }, + { + "epoch": 0.9336469423124536, + "grad_norm": 2.2125493379434755, + "learning_rate": 3.450670154659585e-08, + "loss": 0.5332, + "step": 22626 + }, + { + "epoch": 0.9336882066518115, + "grad_norm": 3.6685391389671413, + "learning_rate": 3.446396116846268e-08, + "loss": 0.5796, + "step": 22627 + }, + { + "epoch": 0.9337294709911694, + "grad_norm": 2.418285115128218, + "learning_rate": 3.442124696846932e-08, + "loss": 0.4665, + "step": 22628 + }, + { + "epoch": 0.9337707353305273, + "grad_norm": 3.5994742035988367, + "learning_rate": 3.437855894737885e-08, + "loss": 0.5802, + "step": 22629 + }, + { + "epoch": 0.9338119996698853, + "grad_norm": 3.7710905565608255, + "learning_rate": 3.4335897105953804e-08, + "loss": 0.4926, + "step": 22630 + }, + { + "epoch": 0.9338532640092432, + "grad_norm": 4.679488840937263, + "learning_rate": 3.429326144495609e-08, + "loss": 0.5284, + "step": 22631 + }, + { + "epoch": 0.9338945283486011, + "grad_norm": 3.323674495467825, + "learning_rate": 3.425065196514743e-08, + "loss": 0.561, + "step": 22632 + }, + { + "epoch": 0.9339357926879591, + "grad_norm": 3.66724886327062, + "learning_rate": 3.4208068667288704e-08, + "loss": 0.4629, + "step": 22633 + }, + { + "epoch": 0.933977057027317, + "grad_norm": 2.5403530777451455, + "learning_rate": 3.416551155214082e-08, + "loss": 0.4758, + "step": 22634 + }, + { + "epoch": 0.934018321366675, + "grad_norm": 9.664964684045977, + "learning_rate": 3.412298062046382e-08, + "loss": 0.5489, + "step": 22635 + }, + { + "epoch": 0.9340595857060329, + "grad_norm": 2.988641244476906, + "learning_rate": 3.40804758730176e-08, + "loss": 0.5141, + "step": 22636 + }, + { + "epoch": 0.9341008500453908, + "grad_norm": 4.906140148456487, + "learning_rate": 3.403799731056123e-08, + "loss": 0.5146, + "step": 22637 + }, + { + "epoch": 0.9341421143847487, + "grad_norm": 7.406630122305529, + "learning_rate": 3.399554493385343e-08, + "loss": 0.5041, + "step": 22638 + }, + { + "epoch": 0.9341833787241066, + "grad_norm": 3.448869285887922, + "learning_rate": 3.395311874365242e-08, + "loss": 0.4923, + "step": 22639 + }, + { + "epoch": 0.9342246430634645, + "grad_norm": 3.7184159503803262, + "learning_rate": 3.391071874071627e-08, + "loss": 0.575, + "step": 22640 + }, + { + "epoch": 0.9342659074028224, + "grad_norm": 3.0282383766292353, + "learning_rate": 3.3868344925802544e-08, + "loss": 0.546, + "step": 22641 + }, + { + "epoch": 0.9343071717421804, + "grad_norm": 9.829416668112607, + "learning_rate": 3.382599729966762e-08, + "loss": 0.5413, + "step": 22642 + }, + { + "epoch": 0.9343484360815383, + "grad_norm": 2.2104066353411795, + "learning_rate": 3.378367586306824e-08, + "loss": 0.4677, + "step": 22643 + }, + { + "epoch": 0.9343897004208963, + "grad_norm": 2.2578051951765667, + "learning_rate": 3.37413806167603e-08, + "loss": 0.4846, 
+ "step": 22644 + }, + { + "epoch": 0.9344309647602542, + "grad_norm": 2.557180978522731, + "learning_rate": 3.3699111561499354e-08, + "loss": 0.5221, + "step": 22645 + }, + { + "epoch": 0.9344722290996121, + "grad_norm": 4.040539656384605, + "learning_rate": 3.3656868698040466e-08, + "loss": 0.4781, + "step": 22646 + }, + { + "epoch": 0.93451349343897, + "grad_norm": 4.044858961145957, + "learning_rate": 3.361465202713804e-08, + "loss": 0.5153, + "step": 22647 + }, + { + "epoch": 0.934554757778328, + "grad_norm": 4.113538032139742, + "learning_rate": 3.357246154954613e-08, + "loss": 0.4873, + "step": 22648 + }, + { + "epoch": 0.9345960221176859, + "grad_norm": 5.6983233154752, + "learning_rate": 3.3530297266018805e-08, + "loss": 0.5285, + "step": 22649 + }, + { + "epoch": 0.9346372864570438, + "grad_norm": 2.5006091383755904, + "learning_rate": 3.3488159177308794e-08, + "loss": 0.5388, + "step": 22650 + }, + { + "epoch": 0.9346785507964017, + "grad_norm": 2.5392567216472197, + "learning_rate": 3.344604728416883e-08, + "loss": 0.474, + "step": 22651 + }, + { + "epoch": 0.9347198151357596, + "grad_norm": 4.140479826271212, + "learning_rate": 3.340396158735132e-08, + "loss": 0.4743, + "step": 22652 + }, + { + "epoch": 0.9347610794751176, + "grad_norm": 2.7589376836261663, + "learning_rate": 3.3361902087607976e-08, + "loss": 0.5225, + "step": 22653 + }, + { + "epoch": 0.9348023438144756, + "grad_norm": 4.403973171072661, + "learning_rate": 3.3319868785689876e-08, + "loss": 0.5143, + "step": 22654 + }, + { + "epoch": 0.9348436081538335, + "grad_norm": 3.6187594918793944, + "learning_rate": 3.327786168234792e-08, + "loss": 0.5743, + "step": 22655 + }, + { + "epoch": 0.9348848724931914, + "grad_norm": 3.342691481926543, + "learning_rate": 3.3235880778332675e-08, + "loss": 0.4516, + "step": 22656 + }, + { + "epoch": 0.9349261368325493, + "grad_norm": 8.206013426090196, + "learning_rate": 3.319392607439387e-08, + "loss": 0.5131, + "step": 22657 + }, + { + "epoch": 0.9349674011719072, + "grad_norm": 3.725755870329388, + "learning_rate": 3.31519975712809e-08, + "loss": 0.5588, + "step": 22658 + }, + { + "epoch": 0.9350086655112652, + "grad_norm": 3.3168265780483472, + "learning_rate": 3.311009526974268e-08, + "loss": 0.4949, + "step": 22659 + }, + { + "epoch": 0.9350499298506231, + "grad_norm": 2.5371051295918297, + "learning_rate": 3.306821917052777e-08, + "loss": 0.5118, + "step": 22660 + }, + { + "epoch": 0.935091194189981, + "grad_norm": 4.214184509108311, + "learning_rate": 3.302636927438441e-08, + "loss": 0.5562, + "step": 22661 + }, + { + "epoch": 0.9351324585293389, + "grad_norm": 2.6411639152689723, + "learning_rate": 3.298454558205966e-08, + "loss": 0.5036, + "step": 22662 + }, + { + "epoch": 0.9351737228686968, + "grad_norm": 47.847796338030605, + "learning_rate": 3.2942748094300924e-08, + "loss": 0.5304, + "step": 22663 + }, + { + "epoch": 0.9352149872080548, + "grad_norm": 3.0658498688584337, + "learning_rate": 3.290097681185478e-08, + "loss": 0.5234, + "step": 22664 + }, + { + "epoch": 0.9352562515474128, + "grad_norm": 2.6526778943156306, + "learning_rate": 3.285923173546712e-08, + "loss": 0.4953, + "step": 22665 + }, + { + "epoch": 0.9352975158867707, + "grad_norm": 1.8809937492379114, + "learning_rate": 3.281751286588402e-08, + "loss": 0.537, + "step": 22666 + }, + { + "epoch": 0.9353387802261286, + "grad_norm": 4.119931674882001, + "learning_rate": 3.277582020385023e-08, + "loss": 0.5781, + "step": 22667 + }, + { + "epoch": 0.9353800445654865, + "grad_norm": 4.900558895997313, + 
"learning_rate": 3.27341537501108e-08, + "loss": 0.5226, + "step": 22668 + }, + { + "epoch": 0.9354213089048444, + "grad_norm": 5.624465263714209, + "learning_rate": 3.269251350540997e-08, + "loss": 0.5131, + "step": 22669 + }, + { + "epoch": 0.9354625732442023, + "grad_norm": 4.313261293762733, + "learning_rate": 3.265089947049166e-08, + "loss": 0.4692, + "step": 22670 + }, + { + "epoch": 0.9355038375835603, + "grad_norm": 10.3048716800244, + "learning_rate": 3.260931164609876e-08, + "loss": 0.4805, + "step": 22671 + }, + { + "epoch": 0.9355451019229182, + "grad_norm": 6.877289541355204, + "learning_rate": 3.256775003297452e-08, + "loss": 0.4614, + "step": 22672 + }, + { + "epoch": 0.9355863662622761, + "grad_norm": 2.287666599999244, + "learning_rate": 3.252621463186117e-08, + "loss": 0.4987, + "step": 22673 + }, + { + "epoch": 0.9356276306016341, + "grad_norm": 4.76248072832472, + "learning_rate": 3.24847054435008e-08, + "loss": 0.4805, + "step": 22674 + }, + { + "epoch": 0.935668894940992, + "grad_norm": 4.649472329154728, + "learning_rate": 3.244322246863462e-08, + "loss": 0.4886, + "step": 22675 + }, + { + "epoch": 0.93571015928035, + "grad_norm": 30.99696102587258, + "learning_rate": 3.240176570800374e-08, + "loss": 0.5085, + "step": 22676 + }, + { + "epoch": 0.9357514236197079, + "grad_norm": 2.1839266249127647, + "learning_rate": 3.236033516234888e-08, + "loss": 0.5177, + "step": 22677 + }, + { + "epoch": 0.9357926879590658, + "grad_norm": 2.7487562974198068, + "learning_rate": 3.2318930832409774e-08, + "loss": 0.546, + "step": 22678 + }, + { + "epoch": 0.9358339522984237, + "grad_norm": 5.87384709489812, + "learning_rate": 3.2277552718926185e-08, + "loss": 0.531, + "step": 22679 + }, + { + "epoch": 0.9358752166377816, + "grad_norm": 2.1789317270595987, + "learning_rate": 3.2236200822637005e-08, + "loss": 0.4698, + "step": 22680 + }, + { + "epoch": 0.9359164809771395, + "grad_norm": 2.719878170590277, + "learning_rate": 3.219487514428116e-08, + "loss": 0.4888, + "step": 22681 + }, + { + "epoch": 0.9359577453164974, + "grad_norm": 1.871440977886638, + "learning_rate": 3.215357568459687e-08, + "loss": 0.5524, + "step": 22682 + }, + { + "epoch": 0.9359990096558554, + "grad_norm": 2.8921776242188812, + "learning_rate": 3.211230244432156e-08, + "loss": 0.4889, + "step": 22683 + }, + { + "epoch": 0.9360402739952134, + "grad_norm": 6.124943727496606, + "learning_rate": 3.207105542419264e-08, + "loss": 0.4715, + "step": 22684 + }, + { + "epoch": 0.9360815383345713, + "grad_norm": 2.337582558933697, + "learning_rate": 3.2029834624946854e-08, + "loss": 0.4858, + "step": 22685 + }, + { + "epoch": 0.9361228026739292, + "grad_norm": 16.148176765264463, + "learning_rate": 3.198864004732077e-08, + "loss": 0.4713, + "step": 22686 + }, + { + "epoch": 0.9361640670132871, + "grad_norm": 5.195050217437568, + "learning_rate": 3.1947471692049644e-08, + "loss": 0.5003, + "step": 22687 + }, + { + "epoch": 0.936205331352645, + "grad_norm": 2.3437943864493223, + "learning_rate": 3.190632955986922e-08, + "loss": 0.5263, + "step": 22688 + }, + { + "epoch": 0.936246595692003, + "grad_norm": 2.6258073870549845, + "learning_rate": 3.18652136515144e-08, + "loss": 0.5232, + "step": 22689 + }, + { + "epoch": 0.9362878600313609, + "grad_norm": 2.2426691340809373, + "learning_rate": 3.1824123967719946e-08, + "loss": 0.4957, + "step": 22690 + }, + { + "epoch": 0.9363291243707188, + "grad_norm": 2.279762651972304, + "learning_rate": 3.178306050921909e-08, + "loss": 0.453, + "step": 22691 + }, + { + "epoch": 
0.9363703887100767, + "grad_norm": 4.272170428564827, + "learning_rate": 3.174202327674558e-08, + "loss": 0.4852, + "step": 22692 + }, + { + "epoch": 0.9364116530494346, + "grad_norm": 2.2116354235790796, + "learning_rate": 3.170101227103267e-08, + "loss": 0.5109, + "step": 22693 + }, + { + "epoch": 0.9364529173887927, + "grad_norm": 4.436997688973729, + "learning_rate": 3.166002749281294e-08, + "loss": 0.5031, + "step": 22694 + }, + { + "epoch": 0.9364941817281506, + "grad_norm": 3.5766816150064735, + "learning_rate": 3.161906894281813e-08, + "loss": 0.4488, + "step": 22695 + }, + { + "epoch": 0.9365354460675085, + "grad_norm": 8.917105401574654, + "learning_rate": 3.157813662178016e-08, + "loss": 0.5039, + "step": 22696 + }, + { + "epoch": 0.9365767104068664, + "grad_norm": 3.48398040581816, + "learning_rate": 3.153723053043012e-08, + "loss": 0.5345, + "step": 22697 + }, + { + "epoch": 0.9366179747462243, + "grad_norm": 16.16587231840496, + "learning_rate": 3.1496350669498744e-08, + "loss": 0.4868, + "step": 22698 + }, + { + "epoch": 0.9366592390855822, + "grad_norm": 3.3875517911142405, + "learning_rate": 3.145549703971612e-08, + "loss": 0.5903, + "step": 22699 + }, + { + "epoch": 0.9367005034249402, + "grad_norm": 2.552710179473114, + "learning_rate": 3.1414669641812e-08, + "loss": 0.5095, + "step": 22700 + }, + { + "epoch": 0.9367417677642981, + "grad_norm": 2.586104705537839, + "learning_rate": 3.1373868476515634e-08, + "loss": 0.5389, + "step": 22701 + }, + { + "epoch": 0.936783032103656, + "grad_norm": 11.150997488111958, + "learning_rate": 3.13330935445561e-08, + "loss": 0.5155, + "step": 22702 + }, + { + "epoch": 0.9368242964430139, + "grad_norm": 3.854534664029638, + "learning_rate": 3.129234484666166e-08, + "loss": 0.5428, + "step": 22703 + }, + { + "epoch": 0.9368655607823718, + "grad_norm": 6.9275180563624925, + "learning_rate": 3.1251622383559884e-08, + "loss": 0.5381, + "step": 22704 + }, + { + "epoch": 0.9369068251217298, + "grad_norm": 3.980199481985795, + "learning_rate": 3.121092615597837e-08, + "loss": 0.4997, + "step": 22705 + }, + { + "epoch": 0.9369480894610878, + "grad_norm": 3.142255212291202, + "learning_rate": 3.117025616464403e-08, + "loss": 0.5002, + "step": 22706 + }, + { + "epoch": 0.9369893538004457, + "grad_norm": 7.904945633845771, + "learning_rate": 3.112961241028345e-08, + "loss": 0.5299, + "step": 22707 + }, + { + "epoch": 0.9370306181398036, + "grad_norm": 2.347654858251666, + "learning_rate": 3.1088994893622545e-08, + "loss": 0.5352, + "step": 22708 + }, + { + "epoch": 0.9370718824791615, + "grad_norm": 3.0822341073694646, + "learning_rate": 3.104840361538674e-08, + "loss": 0.5002, + "step": 22709 + }, + { + "epoch": 0.9371131468185194, + "grad_norm": 3.083221054138584, + "learning_rate": 3.100783857630146e-08, + "loss": 0.4973, + "step": 22710 + }, + { + "epoch": 0.9371544111578773, + "grad_norm": 2.0055837338993783, + "learning_rate": 3.0967299777090785e-08, + "loss": 0.5184, + "step": 22711 + }, + { + "epoch": 0.9371956754972353, + "grad_norm": 3.661920372926615, + "learning_rate": 3.092678721847914e-08, + "loss": 0.5256, + "step": 22712 + }, + { + "epoch": 0.9372369398365932, + "grad_norm": 4.7348265762708035, + "learning_rate": 3.08863009011901e-08, + "loss": 0.5542, + "step": 22713 + }, + { + "epoch": 0.9372782041759511, + "grad_norm": 5.821151980777483, + "learning_rate": 3.08458408259471e-08, + "loss": 0.5424, + "step": 22714 + }, + { + "epoch": 0.9373194685153091, + "grad_norm": 7.527289683258286, + "learning_rate": 3.08054069934724e-08, + 
"loss": 0.4921, + "step": 22715 + }, + { + "epoch": 0.937360732854667, + "grad_norm": 3.131084391978443, + "learning_rate": 3.076499940448873e-08, + "loss": 0.5039, + "step": 22716 + }, + { + "epoch": 0.937401997194025, + "grad_norm": 10.701878397874447, + "learning_rate": 3.072461805971738e-08, + "loss": 0.504, + "step": 22717 + }, + { + "epoch": 0.9374432615333829, + "grad_norm": 2.484529618251142, + "learning_rate": 3.0684262959880086e-08, + "loss": 0.5292, + "step": 22718 + }, + { + "epoch": 0.9374845258727408, + "grad_norm": 21.431601700488063, + "learning_rate": 3.06439341056976e-08, + "loss": 0.6071, + "step": 22719 + }, + { + "epoch": 0.9375257902120987, + "grad_norm": 11.722487899563324, + "learning_rate": 3.060363149789003e-08, + "loss": 0.5345, + "step": 22720 + }, + { + "epoch": 0.9375670545514566, + "grad_norm": 3.7888077653549717, + "learning_rate": 3.0563355137177453e-08, + "loss": 0.5322, + "step": 22721 + }, + { + "epoch": 0.9376083188908145, + "grad_norm": 8.436566324499942, + "learning_rate": 3.05231050242793e-08, + "loss": 0.5404, + "step": 22722 + }, + { + "epoch": 0.9376495832301724, + "grad_norm": 2.223582020555445, + "learning_rate": 3.048288115991482e-08, + "loss": 0.4956, + "step": 22723 + }, + { + "epoch": 0.9376908475695304, + "grad_norm": 3.859253037727396, + "learning_rate": 3.0442683544801955e-08, + "loss": 0.544, + "step": 22724 + }, + { + "epoch": 0.9377321119088884, + "grad_norm": 2.7937892541469123, + "learning_rate": 3.040251217965912e-08, + "loss": 0.4893, + "step": 22725 + }, + { + "epoch": 0.9377733762482463, + "grad_norm": 1.943117678476628, + "learning_rate": 3.0362367065203735e-08, + "loss": 0.477, + "step": 22726 + }, + { + "epoch": 0.9378146405876042, + "grad_norm": 2.8952201489294147, + "learning_rate": 3.032224820215324e-08, + "loss": 0.4917, + "step": 22727 + }, + { + "epoch": 0.9378559049269621, + "grad_norm": 7.696857396086803, + "learning_rate": 3.0282155591223716e-08, + "loss": 0.4935, + "step": 22728 + }, + { + "epoch": 0.93789716926632, + "grad_norm": 2.98007379926025, + "learning_rate": 3.02420892331316e-08, + "loss": 0.5127, + "step": 22729 + }, + { + "epoch": 0.937938433605678, + "grad_norm": 2.973515851984489, + "learning_rate": 3.020204912859248e-08, + "loss": 0.5422, + "step": 22730 + }, + { + "epoch": 0.9379796979450359, + "grad_norm": 10.181125793380655, + "learning_rate": 3.016203527832179e-08, + "loss": 0.4501, + "step": 22731 + }, + { + "epoch": 0.9380209622843938, + "grad_norm": 2.441374968831959, + "learning_rate": 3.0122047683034116e-08, + "loss": 0.4885, + "step": 22732 + }, + { + "epoch": 0.9380622266237517, + "grad_norm": 4.961675000924636, + "learning_rate": 3.008208634344356e-08, + "loss": 0.5268, + "step": 22733 + }, + { + "epoch": 0.9381034909631096, + "grad_norm": 2.576445464547488, + "learning_rate": 3.004215126026422e-08, + "loss": 0.5212, + "step": 22734 + }, + { + "epoch": 0.9381447553024677, + "grad_norm": 2.6972524549703234, + "learning_rate": 3.000224243420935e-08, + "loss": 0.5288, + "step": 22735 + }, + { + "epoch": 0.9381860196418256, + "grad_norm": 4.529671548323044, + "learning_rate": 2.9962359865991714e-08, + "loss": 0.451, + "step": 22736 + }, + { + "epoch": 0.9382272839811835, + "grad_norm": 3.8569268940289048, + "learning_rate": 2.9922503556323755e-08, + "loss": 0.5211, + "step": 22737 + }, + { + "epoch": 0.9382685483205414, + "grad_norm": 3.194569688883502, + "learning_rate": 2.9882673505917555e-08, + "loss": 0.4369, + "step": 22738 + }, + { + "epoch": 0.9383098126598993, + "grad_norm": 
2.585044657390405, + "learning_rate": 2.9842869715484554e-08, + "loss": 0.4456, + "step": 22739 + }, + { + "epoch": 0.9383510769992572, + "grad_norm": 3.3558239967146934, + "learning_rate": 2.980309218573568e-08, + "loss": 0.5086, + "step": 22740 + }, + { + "epoch": 0.9383923413386152, + "grad_norm": 3.2122232290591355, + "learning_rate": 2.97633409173812e-08, + "loss": 0.504, + "step": 22741 + }, + { + "epoch": 0.9384336056779731, + "grad_norm": 13.056452702055244, + "learning_rate": 2.9723615911131707e-08, + "loss": 0.5859, + "step": 22742 + }, + { + "epoch": 0.938474870017331, + "grad_norm": 2.4320887745681006, + "learning_rate": 2.9683917167696307e-08, + "loss": 0.4308, + "step": 22743 + }, + { + "epoch": 0.9385161343566889, + "grad_norm": 7.444970511446944, + "learning_rate": 2.9644244687784595e-08, + "loss": 0.5304, + "step": 22744 + }, + { + "epoch": 0.9385573986960469, + "grad_norm": 2.431765108889658, + "learning_rate": 2.9604598472104673e-08, + "loss": 0.5454, + "step": 22745 + }, + { + "epoch": 0.9385986630354048, + "grad_norm": 7.751621759132562, + "learning_rate": 2.9564978521364972e-08, + "loss": 0.5046, + "step": 22746 + }, + { + "epoch": 0.9386399273747628, + "grad_norm": 2.1856159862525946, + "learning_rate": 2.952538483627326e-08, + "loss": 0.5096, + "step": 22747 + }, + { + "epoch": 0.9386811917141207, + "grad_norm": 3.8025381286378823, + "learning_rate": 2.948581741753681e-08, + "loss": 0.5509, + "step": 22748 + }, + { + "epoch": 0.9387224560534786, + "grad_norm": 4.54480195721609, + "learning_rate": 2.9446276265862215e-08, + "loss": 0.5219, + "step": 22749 + }, + { + "epoch": 0.9387637203928365, + "grad_norm": 5.9693268064828136, + "learning_rate": 2.9406761381955915e-08, + "loss": 0.4759, + "step": 22750 + }, + { + "epoch": 0.9388049847321944, + "grad_norm": 6.402645743667785, + "learning_rate": 2.9367272766523844e-08, + "loss": 0.4691, + "step": 22751 + }, + { + "epoch": 0.9388462490715523, + "grad_norm": 3.462787552723975, + "learning_rate": 2.9327810420271105e-08, + "loss": 0.49, + "step": 22752 + }, + { + "epoch": 0.9388875134109103, + "grad_norm": 3.4165837109356603, + "learning_rate": 2.9288374343902802e-08, + "loss": 0.523, + "step": 22753 + }, + { + "epoch": 0.9389287777502682, + "grad_norm": 4.394581537528009, + "learning_rate": 2.9248964538123202e-08, + "loss": 0.4747, + "step": 22754 + }, + { + "epoch": 0.9389700420896262, + "grad_norm": 2.8982027822637346, + "learning_rate": 2.9209581003636577e-08, + "loss": 0.5343, + "step": 22755 + }, + { + "epoch": 0.9390113064289841, + "grad_norm": 3.9470606601414158, + "learning_rate": 2.917022374114603e-08, + "loss": 0.5044, + "step": 22756 + }, + { + "epoch": 0.939052570768342, + "grad_norm": 3.3495787017627374, + "learning_rate": 2.9130892751354666e-08, + "loss": 0.5063, + "step": 22757 + }, + { + "epoch": 0.9390938351077, + "grad_norm": 3.44237058916869, + "learning_rate": 2.9091588034965256e-08, + "loss": 0.4415, + "step": 22758 + }, + { + "epoch": 0.9391350994470579, + "grad_norm": 4.7101315085092725, + "learning_rate": 2.905230959267957e-08, + "loss": 0.5822, + "step": 22759 + }, + { + "epoch": 0.9391763637864158, + "grad_norm": 2.2436679720492037, + "learning_rate": 2.9013057425199553e-08, + "loss": 0.5102, + "step": 22760 + }, + { + "epoch": 0.9392176281257737, + "grad_norm": 3.2220379074076657, + "learning_rate": 2.8973831533226137e-08, + "loss": 0.5149, + "step": 22761 + }, + { + "epoch": 0.9392588924651316, + "grad_norm": 2.4531019331896937, + "learning_rate": 2.893463191745993e-08, + "loss": 0.5529, + 
"step": 22762 + }, + { + "epoch": 0.9393001568044895, + "grad_norm": 3.3293769142570517, + "learning_rate": 2.8895458578601207e-08, + "loss": 0.4869, + "step": 22763 + }, + { + "epoch": 0.9393414211438474, + "grad_norm": 1.9433563364241029, + "learning_rate": 2.885631151734991e-08, + "loss": 0.4654, + "step": 22764 + }, + { + "epoch": 0.9393826854832054, + "grad_norm": 2.1259238370128886, + "learning_rate": 2.8817190734404975e-08, + "loss": 0.5113, + "step": 22765 + }, + { + "epoch": 0.9394239498225634, + "grad_norm": 4.05406828702456, + "learning_rate": 2.8778096230465345e-08, + "loss": 0.5243, + "step": 22766 + }, + { + "epoch": 0.9394652141619213, + "grad_norm": 2.6294870982123664, + "learning_rate": 2.8739028006229463e-08, + "loss": 0.4723, + "step": 22767 + }, + { + "epoch": 0.9395064785012792, + "grad_norm": 6.797413596342536, + "learning_rate": 2.86999860623951e-08, + "loss": 0.5566, + "step": 22768 + }, + { + "epoch": 0.9395477428406371, + "grad_norm": 2.3702377522664912, + "learning_rate": 2.8660970399659703e-08, + "loss": 0.5377, + "step": 22769 + }, + { + "epoch": 0.939589007179995, + "grad_norm": 1.710305201417999, + "learning_rate": 2.862198101871988e-08, + "loss": 0.4473, + "step": 22770 + }, + { + "epoch": 0.939630271519353, + "grad_norm": 4.346874484726521, + "learning_rate": 2.8583017920272237e-08, + "loss": 0.5209, + "step": 22771 + }, + { + "epoch": 0.9396715358587109, + "grad_norm": 2.4899584048354497, + "learning_rate": 2.8544081105013054e-08, + "loss": 0.4587, + "step": 22772 + }, + { + "epoch": 0.9397128001980688, + "grad_norm": 4.52905031304722, + "learning_rate": 2.850517057363744e-08, + "loss": 0.4941, + "step": 22773 + }, + { + "epoch": 0.9397540645374267, + "grad_norm": 3.828014667452232, + "learning_rate": 2.8466286326840673e-08, + "loss": 0.573, + "step": 22774 + }, + { + "epoch": 0.9397953288767846, + "grad_norm": 2.749315682644439, + "learning_rate": 2.8427428365317198e-08, + "loss": 0.5453, + "step": 22775 + }, + { + "epoch": 0.9398365932161427, + "grad_norm": 2.7902666973832972, + "learning_rate": 2.8388596689761293e-08, + "loss": 0.5051, + "step": 22776 + }, + { + "epoch": 0.9398778575555006, + "grad_norm": 2.6337774754975047, + "learning_rate": 2.8349791300866234e-08, + "loss": 0.5824, + "step": 22777 + }, + { + "epoch": 0.9399191218948585, + "grad_norm": 3.7254945284290057, + "learning_rate": 2.831101219932547e-08, + "loss": 0.4997, + "step": 22778 + }, + { + "epoch": 0.9399603862342164, + "grad_norm": 2.3087231475940078, + "learning_rate": 2.827225938583161e-08, + "loss": 0.5126, + "step": 22779 + }, + { + "epoch": 0.9400016505735743, + "grad_norm": 6.849062403738467, + "learning_rate": 2.8233532861077104e-08, + "loss": 0.4447, + "step": 22780 + }, + { + "epoch": 0.9400429149129322, + "grad_norm": 17.725651301867288, + "learning_rate": 2.8194832625753396e-08, + "loss": 0.4683, + "step": 22781 + }, + { + "epoch": 0.9400841792522902, + "grad_norm": 2.3011317120127273, + "learning_rate": 2.8156158680551935e-08, + "loss": 0.4717, + "step": 22782 + }, + { + "epoch": 0.9401254435916481, + "grad_norm": 2.2048468797071594, + "learning_rate": 2.8117511026163333e-08, + "loss": 0.5221, + "step": 22783 + }, + { + "epoch": 0.940166707931006, + "grad_norm": 4.101835471513927, + "learning_rate": 2.807888966327804e-08, + "loss": 0.4613, + "step": 22784 + }, + { + "epoch": 0.9402079722703639, + "grad_norm": 3.1865117077420066, + "learning_rate": 2.8040294592586003e-08, + "loss": 0.494, + "step": 22785 + }, + { + "epoch": 0.9402492366097219, + "grad_norm": 
2.035383195652394, + "learning_rate": 2.800172581477667e-08, + "loss": 0.527, + "step": 22786 + }, + { + "epoch": 0.9402905009490798, + "grad_norm": 9.53257297488969, + "learning_rate": 2.796318333053882e-08, + "loss": 0.4994, + "step": 22787 + }, + { + "epoch": 0.9403317652884378, + "grad_norm": 4.214497093327834, + "learning_rate": 2.7924667140561078e-08, + "loss": 0.5267, + "step": 22788 + }, + { + "epoch": 0.9403730296277957, + "grad_norm": 3.7489307105006326, + "learning_rate": 2.788617724553122e-08, + "loss": 0.5816, + "step": 22789 + }, + { + "epoch": 0.9404142939671536, + "grad_norm": 6.8033086100323725, + "learning_rate": 2.7847713646136863e-08, + "loss": 0.5247, + "step": 22790 + }, + { + "epoch": 0.9404555583065115, + "grad_norm": 20.331227388566948, + "learning_rate": 2.7809276343065125e-08, + "loss": 0.5457, + "step": 22791 + }, + { + "epoch": 0.9404968226458694, + "grad_norm": 9.264459053229764, + "learning_rate": 2.7770865337002793e-08, + "loss": 0.504, + "step": 22792 + }, + { + "epoch": 0.9405380869852273, + "grad_norm": 3.8551845238635343, + "learning_rate": 2.773248062863548e-08, + "loss": 0.4878, + "step": 22793 + }, + { + "epoch": 0.9405793513245853, + "grad_norm": 3.9805438417681684, + "learning_rate": 2.7694122218649143e-08, + "loss": 0.5084, + "step": 22794 + }, + { + "epoch": 0.9406206156639432, + "grad_norm": 19.30083702473192, + "learning_rate": 2.765579010772923e-08, + "loss": 0.5147, + "step": 22795 + }, + { + "epoch": 0.9406618800033012, + "grad_norm": 2.408952379653643, + "learning_rate": 2.7617484296559857e-08, + "loss": 0.4848, + "step": 22796 + }, + { + "epoch": 0.9407031443426591, + "grad_norm": 2.5855228105378094, + "learning_rate": 2.7579204785825652e-08, + "loss": 0.5436, + "step": 22797 + }, + { + "epoch": 0.940744408682017, + "grad_norm": 53.56841070934583, + "learning_rate": 2.754095157621023e-08, + "loss": 0.5232, + "step": 22798 + }, + { + "epoch": 0.940785673021375, + "grad_norm": 4.815915341077271, + "learning_rate": 2.750272466839687e-08, + "loss": 0.508, + "step": 22799 + }, + { + "epoch": 0.9408269373607329, + "grad_norm": 2.584350884856051, + "learning_rate": 2.746452406306854e-08, + "loss": 0.4683, + "step": 22800 + }, + { + "epoch": 0.9408682017000908, + "grad_norm": 5.342264776899545, + "learning_rate": 2.7426349760907688e-08, + "loss": 0.4955, + "step": 22801 + }, + { + "epoch": 0.9409094660394487, + "grad_norm": 5.088472948975728, + "learning_rate": 2.7388201762595933e-08, + "loss": 0.535, + "step": 22802 + }, + { + "epoch": 0.9409507303788066, + "grad_norm": 2.958742814682864, + "learning_rate": 2.7350080068814733e-08, + "loss": 0.5104, + "step": 22803 + }, + { + "epoch": 0.9409919947181645, + "grad_norm": 2.2966685802110813, + "learning_rate": 2.7311984680245038e-08, + "loss": 0.4608, + "step": 22804 + }, + { + "epoch": 0.9410332590575224, + "grad_norm": 4.872437226757543, + "learning_rate": 2.7273915597567645e-08, + "loss": 0.5297, + "step": 22805 + }, + { + "epoch": 0.9410745233968805, + "grad_norm": 6.256994456168623, + "learning_rate": 2.723587282146217e-08, + "loss": 0.52, + "step": 22806 + }, + { + "epoch": 0.9411157877362384, + "grad_norm": 3.0948135978754707, + "learning_rate": 2.7197856352608242e-08, + "loss": 0.5262, + "step": 22807 + }, + { + "epoch": 0.9411570520755963, + "grad_norm": 3.385827542052559, + "learning_rate": 2.7159866191685144e-08, + "loss": 0.5034, + "step": 22808 + }, + { + "epoch": 0.9411983164149542, + "grad_norm": 7.043256911344278, + "learning_rate": 2.7121902339371342e-08, + "loss": 0.5102, + "step": 
22809 + }, + { + "epoch": 0.9412395807543121, + "grad_norm": 3.180606916187393, + "learning_rate": 2.7083964796344783e-08, + "loss": 0.5456, + "step": 22810 + }, + { + "epoch": 0.94128084509367, + "grad_norm": 3.6678049576400906, + "learning_rate": 2.704605356328327e-08, + "loss": 0.5018, + "step": 22811 + }, + { + "epoch": 0.941322109433028, + "grad_norm": 2.615891611730778, + "learning_rate": 2.7008168640864082e-08, + "loss": 0.5396, + "step": 22812 + }, + { + "epoch": 0.9413633737723859, + "grad_norm": 3.112280789554121, + "learning_rate": 2.6970310029764022e-08, + "loss": 0.4891, + "step": 22813 + }, + { + "epoch": 0.9414046381117438, + "grad_norm": 2.771002712165326, + "learning_rate": 2.6932477730658878e-08, + "loss": 0.5605, + "step": 22814 + }, + { + "epoch": 0.9414459024511017, + "grad_norm": 3.356157309095236, + "learning_rate": 2.689467174422494e-08, + "loss": 0.4765, + "step": 22815 + }, + { + "epoch": 0.9414871667904597, + "grad_norm": 9.42836548844357, + "learning_rate": 2.6856892071137174e-08, + "loss": 0.5412, + "step": 22816 + }, + { + "epoch": 0.9415284311298177, + "grad_norm": 2.986128233397231, + "learning_rate": 2.6819138712070702e-08, + "loss": 0.518, + "step": 22817 + }, + { + "epoch": 0.9415696954691756, + "grad_norm": 3.4984898314827033, + "learning_rate": 2.678141166769965e-08, + "loss": 0.4731, + "step": 22818 + }, + { + "epoch": 0.9416109598085335, + "grad_norm": 2.1720053389561573, + "learning_rate": 2.6743710938697984e-08, + "loss": 0.4972, + "step": 22819 + }, + { + "epoch": 0.9416522241478914, + "grad_norm": 3.247871186091501, + "learning_rate": 2.6706036525739165e-08, + "loss": 0.4967, + "step": 22820 + }, + { + "epoch": 0.9416934884872493, + "grad_norm": 3.0495205127741967, + "learning_rate": 2.6668388429496148e-08, + "loss": 0.5199, + "step": 22821 + }, + { + "epoch": 0.9417347528266072, + "grad_norm": 2.7832965896863513, + "learning_rate": 2.663076665064157e-08, + "loss": 0.5455, + "step": 22822 + }, + { + "epoch": 0.9417760171659652, + "grad_norm": 2.619025184175202, + "learning_rate": 2.6593171189847222e-08, + "loss": 0.4765, + "step": 22823 + }, + { + "epoch": 0.9418172815053231, + "grad_norm": 2.2898040092943246, + "learning_rate": 2.6555602047784734e-08, + "loss": 0.4896, + "step": 22824 + }, + { + "epoch": 0.941858545844681, + "grad_norm": 5.633514318355421, + "learning_rate": 2.6518059225125067e-08, + "loss": 0.5275, + "step": 22825 + }, + { + "epoch": 0.9418998101840389, + "grad_norm": 13.246799670299247, + "learning_rate": 2.648054272253919e-08, + "loss": 0.5198, + "step": 22826 + }, + { + "epoch": 0.9419410745233969, + "grad_norm": 2.2963001757016097, + "learning_rate": 2.6443052540696733e-08, + "loss": 0.4403, + "step": 22827 + }, + { + "epoch": 0.9419823388627548, + "grad_norm": 3.605024246401486, + "learning_rate": 2.6405588680267823e-08, + "loss": 0.5, + "step": 22828 + }, + { + "epoch": 0.9420236032021128, + "grad_norm": 3.12946435174232, + "learning_rate": 2.6368151141921426e-08, + "loss": 0.5228, + "step": 22829 + }, + { + "epoch": 0.9420648675414707, + "grad_norm": 3.6771470284463628, + "learning_rate": 2.633073992632634e-08, + "loss": 0.5813, + "step": 22830 + }, + { + "epoch": 0.9421061318808286, + "grad_norm": 3.2858956450951786, + "learning_rate": 2.62933550341507e-08, + "loss": 0.4973, + "step": 22831 + }, + { + "epoch": 0.9421473962201865, + "grad_norm": 5.096934850881787, + "learning_rate": 2.6255996466062306e-08, + "loss": 0.5284, + "step": 22832 + }, + { + "epoch": 0.9421886605595444, + "grad_norm": 2.2234781756240425, + 
"learning_rate": 2.6218664222728783e-08, + "loss": 0.4881, + "step": 22833 + }, + { + "epoch": 0.9422299248989023, + "grad_norm": 16.313517956647473, + "learning_rate": 2.618135830481677e-08, + "loss": 0.4966, + "step": 22834 + }, + { + "epoch": 0.9422711892382603, + "grad_norm": 8.699327679884016, + "learning_rate": 2.6144078712992568e-08, + "loss": 0.5513, + "step": 22835 + }, + { + "epoch": 0.9423124535776182, + "grad_norm": 3.5244417729200115, + "learning_rate": 2.6106825447921978e-08, + "loss": 0.5293, + "step": 22836 + }, + { + "epoch": 0.9423537179169762, + "grad_norm": 2.1699403250113507, + "learning_rate": 2.6069598510270632e-08, + "loss": 0.4764, + "step": 22837 + }, + { + "epoch": 0.9423949822563341, + "grad_norm": 3.761760983639604, + "learning_rate": 2.6032397900703663e-08, + "loss": 0.4732, + "step": 22838 + }, + { + "epoch": 0.942436246595692, + "grad_norm": 3.6530123211179086, + "learning_rate": 2.599522361988521e-08, + "loss": 0.4475, + "step": 22839 + }, + { + "epoch": 0.94247751093505, + "grad_norm": 4.040324473345429, + "learning_rate": 2.5958075668479574e-08, + "loss": 0.5083, + "step": 22840 + }, + { + "epoch": 0.9425187752744079, + "grad_norm": 2.310660536246177, + "learning_rate": 2.592095404715006e-08, + "loss": 0.4719, + "step": 22841 + }, + { + "epoch": 0.9425600396137658, + "grad_norm": 8.715420089174314, + "learning_rate": 2.5883858756559965e-08, + "loss": 0.5444, + "step": 22842 + }, + { + "epoch": 0.9426013039531237, + "grad_norm": 2.1859092499834167, + "learning_rate": 2.584678979737176e-08, + "loss": 0.5008, + "step": 22843 + }, + { + "epoch": 0.9426425682924816, + "grad_norm": 8.754886510682471, + "learning_rate": 2.5809747170247756e-08, + "loss": 0.472, + "step": 22844 + }, + { + "epoch": 0.9426838326318395, + "grad_norm": 2.920199011156709, + "learning_rate": 2.5772730875849414e-08, + "loss": 0.5248, + "step": 22845 + }, + { + "epoch": 0.9427250969711974, + "grad_norm": 10.909483126975266, + "learning_rate": 2.5735740914838212e-08, + "loss": 0.471, + "step": 22846 + }, + { + "epoch": 0.9427663613105555, + "grad_norm": 3.243046121343665, + "learning_rate": 2.5698777287874618e-08, + "loss": 0.5076, + "step": 22847 + }, + { + "epoch": 0.9428076256499134, + "grad_norm": 3.550472718270495, + "learning_rate": 2.5661839995619106e-08, + "loss": 0.5437, + "step": 22848 + }, + { + "epoch": 0.9428488899892713, + "grad_norm": 2.217299874590805, + "learning_rate": 2.5624929038731147e-08, + "loss": 0.487, + "step": 22849 + }, + { + "epoch": 0.9428901543286292, + "grad_norm": 2.4814660316424506, + "learning_rate": 2.5588044417870547e-08, + "loss": 0.5104, + "step": 22850 + }, + { + "epoch": 0.9429314186679871, + "grad_norm": 4.111662584774561, + "learning_rate": 2.5551186133695616e-08, + "loss": 0.512, + "step": 22851 + }, + { + "epoch": 0.942972683007345, + "grad_norm": 2.4727866519566852, + "learning_rate": 2.5514354186865153e-08, + "loss": 0.555, + "step": 22852 + }, + { + "epoch": 0.943013947346703, + "grad_norm": 7.942123671748049, + "learning_rate": 2.5477548578036803e-08, + "loss": 0.5119, + "step": 22853 + }, + { + "epoch": 0.9430552116860609, + "grad_norm": 3.0987841600897346, + "learning_rate": 2.5440769307868374e-08, + "loss": 0.495, + "step": 22854 + }, + { + "epoch": 0.9430964760254188, + "grad_norm": 3.9229350376328656, + "learning_rate": 2.5404016377016336e-08, + "loss": 0.5403, + "step": 22855 + }, + { + "epoch": 0.9431377403647767, + "grad_norm": 2.6814556995617727, + "learning_rate": 2.5367289786137503e-08, + "loss": 0.5309, + "step": 22856 + }, + { + 
"epoch": 0.9431790047041347, + "grad_norm": 76.95389764813228, + "learning_rate": 2.533058953588785e-08, + "loss": 0.4861, + "step": 22857 + }, + { + "epoch": 0.9432202690434927, + "grad_norm": 3.3949996605494333, + "learning_rate": 2.5293915626923013e-08, + "loss": 0.4927, + "step": 22858 + }, + { + "epoch": 0.9432615333828506, + "grad_norm": 3.4079048024133076, + "learning_rate": 2.5257268059897808e-08, + "loss": 0.4748, + "step": 22859 + }, + { + "epoch": 0.9433027977222085, + "grad_norm": 5.03940354257987, + "learning_rate": 2.522064683546721e-08, + "loss": 0.5551, + "step": 22860 + }, + { + "epoch": 0.9433440620615664, + "grad_norm": 3.6988617188486117, + "learning_rate": 2.518405195428519e-08, + "loss": 0.4553, + "step": 22861 + }, + { + "epoch": 0.9433853264009243, + "grad_norm": 5.376925459059412, + "learning_rate": 2.5147483417005234e-08, + "loss": 0.455, + "step": 22862 + }, + { + "epoch": 0.9434265907402822, + "grad_norm": 5.317902063730931, + "learning_rate": 2.511094122428098e-08, + "loss": 0.4824, + "step": 22863 + }, + { + "epoch": 0.9434678550796401, + "grad_norm": 2.600456721813064, + "learning_rate": 2.507442537676491e-08, + "loss": 0.4831, + "step": 22864 + }, + { + "epoch": 0.9435091194189981, + "grad_norm": 6.235722164045013, + "learning_rate": 2.5037935875109163e-08, + "loss": 0.4992, + "step": 22865 + }, + { + "epoch": 0.943550383758356, + "grad_norm": 3.122605240173714, + "learning_rate": 2.5001472719965725e-08, + "loss": 0.5068, + "step": 22866 + }, + { + "epoch": 0.943591648097714, + "grad_norm": 3.291748737851038, + "learning_rate": 2.4965035911985902e-08, + "loss": 0.5299, + "step": 22867 + }, + { + "epoch": 0.9436329124370719, + "grad_norm": 3.2806421893336326, + "learning_rate": 2.492862545182034e-08, + "loss": 0.5378, + "step": 22868 + }, + { + "epoch": 0.9436741767764298, + "grad_norm": 2.786591733618983, + "learning_rate": 2.4892241340119694e-08, + "loss": 0.515, + "step": 22869 + }, + { + "epoch": 0.9437154411157878, + "grad_norm": 4.206191700763494, + "learning_rate": 2.4855883577533934e-08, + "loss": 0.5734, + "step": 22870 + }, + { + "epoch": 0.9437567054551457, + "grad_norm": 2.8469472693355007, + "learning_rate": 2.481955216471221e-08, + "loss": 0.5183, + "step": 22871 + }, + { + "epoch": 0.9437979697945036, + "grad_norm": 8.817131094382072, + "learning_rate": 2.4783247102303508e-08, + "loss": 0.5025, + "step": 22872 + }, + { + "epoch": 0.9438392341338615, + "grad_norm": 5.0647950379100894, + "learning_rate": 2.474696839095647e-08, + "loss": 0.5845, + "step": 22873 + }, + { + "epoch": 0.9438804984732194, + "grad_norm": 3.218836205215461, + "learning_rate": 2.4710716031319246e-08, + "loss": 0.506, + "step": 22874 + }, + { + "epoch": 0.9439217628125773, + "grad_norm": 10.4557321287882, + "learning_rate": 2.467449002403932e-08, + "loss": 0.4973, + "step": 22875 + }, + { + "epoch": 0.9439630271519353, + "grad_norm": 3.141029598928992, + "learning_rate": 2.4638290369763506e-08, + "loss": 0.4832, + "step": 22876 + }, + { + "epoch": 0.9440042914912933, + "grad_norm": 4.832528169360797, + "learning_rate": 2.460211706913862e-08, + "loss": 0.4924, + "step": 22877 + }, + { + "epoch": 0.9440455558306512, + "grad_norm": 4.618524075619479, + "learning_rate": 2.456597012281081e-08, + "loss": 0.5387, + "step": 22878 + }, + { + "epoch": 0.9440868201700091, + "grad_norm": 4.8947711007985495, + "learning_rate": 2.4529849531425728e-08, + "loss": 0.4589, + "step": 22879 + }, + { + "epoch": 0.944128084509367, + "grad_norm": 2.4537299229144502, + "learning_rate": 
2.4493755295628695e-08, + "loss": 0.4818, + "step": 22880 + }, + { + "epoch": 0.9441693488487249, + "grad_norm": 9.775542539089592, + "learning_rate": 2.445768741606419e-08, + "loss": 0.5322, + "step": 22881 + }, + { + "epoch": 0.9442106131880829, + "grad_norm": 3.4590716381394615, + "learning_rate": 2.44216458933767e-08, + "loss": 0.4559, + "step": 22882 + }, + { + "epoch": 0.9442518775274408, + "grad_norm": 2.1639864675875318, + "learning_rate": 2.438563072821004e-08, + "loss": 0.5147, + "step": 22883 + }, + { + "epoch": 0.9442931418667987, + "grad_norm": 5.803753451255432, + "learning_rate": 2.4349641921207365e-08, + "loss": 0.4969, + "step": 22884 + }, + { + "epoch": 0.9443344062061566, + "grad_norm": 2.88023906443054, + "learning_rate": 2.431367947301166e-08, + "loss": 0.4914, + "step": 22885 + }, + { + "epoch": 0.9443756705455145, + "grad_norm": 2.6492473261764866, + "learning_rate": 2.427774338426525e-08, + "loss": 0.5051, + "step": 22886 + }, + { + "epoch": 0.9444169348848724, + "grad_norm": 3.104711773546292, + "learning_rate": 2.4241833655609946e-08, + "loss": 0.4676, + "step": 22887 + }, + { + "epoch": 0.9444581992242305, + "grad_norm": 2.966682803409848, + "learning_rate": 2.420595028768724e-08, + "loss": 0.5168, + "step": 22888 + }, + { + "epoch": 0.9444994635635884, + "grad_norm": 4.020395618467729, + "learning_rate": 2.4170093281138283e-08, + "loss": 0.4698, + "step": 22889 + }, + { + "epoch": 0.9445407279029463, + "grad_norm": 3.5504250965452897, + "learning_rate": 2.4134262636603232e-08, + "loss": 0.5283, + "step": 22890 + }, + { + "epoch": 0.9445819922423042, + "grad_norm": 3.116273742670186, + "learning_rate": 2.409845835472241e-08, + "loss": 0.5158, + "step": 22891 + }, + { + "epoch": 0.9446232565816621, + "grad_norm": 3.902075058007869, + "learning_rate": 2.4062680436135298e-08, + "loss": 0.5208, + "step": 22892 + }, + { + "epoch": 0.94466452092102, + "grad_norm": 4.469174200820656, + "learning_rate": 2.402692888148089e-08, + "loss": 0.5008, + "step": 22893 + }, + { + "epoch": 0.944705785260378, + "grad_norm": 3.8954447331234325, + "learning_rate": 2.3991203691397844e-08, + "loss": 0.5164, + "step": 22894 + }, + { + "epoch": 0.9447470495997359, + "grad_norm": 5.54522473489428, + "learning_rate": 2.3955504866524314e-08, + "loss": 0.5574, + "step": 22895 + }, + { + "epoch": 0.9447883139390938, + "grad_norm": 2.56383765393397, + "learning_rate": 2.3919832407497953e-08, + "loss": 0.5146, + "step": 22896 + }, + { + "epoch": 0.9448295782784517, + "grad_norm": 2.260165506718505, + "learning_rate": 2.3884186314955924e-08, + "loss": 0.4707, + "step": 22897 + }, + { + "epoch": 0.9448708426178097, + "grad_norm": 2.1815475720726742, + "learning_rate": 2.384856658953505e-08, + "loss": 0.4712, + "step": 22898 + }, + { + "epoch": 0.9449121069571677, + "grad_norm": 2.7775443153867987, + "learning_rate": 2.3812973231871647e-08, + "loss": 0.4258, + "step": 22899 + }, + { + "epoch": 0.9449533712965256, + "grad_norm": 4.362841351993833, + "learning_rate": 2.3777406242601385e-08, + "loss": 0.5647, + "step": 22900 + }, + { + "epoch": 0.9449946356358835, + "grad_norm": 5.67203721453288, + "learning_rate": 2.3741865622359415e-08, + "loss": 0.4951, + "step": 22901 + }, + { + "epoch": 0.9450358999752414, + "grad_norm": 9.043537244548178, + "learning_rate": 2.37063513717809e-08, + "loss": 0.509, + "step": 22902 + }, + { + "epoch": 0.9450771643145993, + "grad_norm": 2.9075632982095447, + "learning_rate": 2.3670863491499993e-08, + "loss": 0.4832, + "step": 22903 + }, + { + "epoch": 
0.9451184286539572, + "grad_norm": 2.966673656214846, + "learning_rate": 2.363540198215086e-08, + "loss": 0.5214, + "step": 22904 + }, + { + "epoch": 0.9451596929933151, + "grad_norm": 2.1678265222174256, + "learning_rate": 2.3599966844366493e-08, + "loss": 0.5178, + "step": 22905 + }, + { + "epoch": 0.9452009573326731, + "grad_norm": 3.2423043249274763, + "learning_rate": 2.356455807878022e-08, + "loss": 0.5109, + "step": 22906 + }, + { + "epoch": 0.945242221672031, + "grad_norm": 3.881835920510371, + "learning_rate": 2.3529175686024363e-08, + "loss": 0.5369, + "step": 22907 + }, + { + "epoch": 0.945283486011389, + "grad_norm": 3.6441090742144633, + "learning_rate": 2.3493819666731086e-08, + "loss": 0.4916, + "step": 22908 + }, + { + "epoch": 0.9453247503507469, + "grad_norm": 3.0890618454613397, + "learning_rate": 2.345849002153172e-08, + "loss": 0.4603, + "step": 22909 + }, + { + "epoch": 0.9453660146901048, + "grad_norm": 2.430698622754174, + "learning_rate": 2.3423186751057425e-08, + "loss": 0.5864, + "step": 22910 + }, + { + "epoch": 0.9454072790294628, + "grad_norm": 3.0580545178669447, + "learning_rate": 2.3387909855939192e-08, + "loss": 0.5308, + "step": 22911 + }, + { + "epoch": 0.9454485433688207, + "grad_norm": 3.7800969196652807, + "learning_rate": 2.3352659336806526e-08, + "loss": 0.5209, + "step": 22912 + }, + { + "epoch": 0.9454898077081786, + "grad_norm": 3.597664465100703, + "learning_rate": 2.3317435194289416e-08, + "loss": 0.5062, + "step": 22913 + }, + { + "epoch": 0.9455310720475365, + "grad_norm": 2.986653500297222, + "learning_rate": 2.328223742901703e-08, + "loss": 0.5283, + "step": 22914 + }, + { + "epoch": 0.9455723363868944, + "grad_norm": 5.1171686859839, + "learning_rate": 2.324706604161786e-08, + "loss": 0.5301, + "step": 22915 + }, + { + "epoch": 0.9456136007262523, + "grad_norm": 2.4138168346545297, + "learning_rate": 2.321192103272074e-08, + "loss": 0.4451, + "step": 22916 + }, + { + "epoch": 0.9456548650656103, + "grad_norm": 8.33115547149913, + "learning_rate": 2.3176802402952836e-08, + "loss": 0.5102, + "step": 22917 + }, + { + "epoch": 0.9456961294049683, + "grad_norm": 2.9196325782580232, + "learning_rate": 2.3141710152941642e-08, + "loss": 0.5396, + "step": 22918 + }, + { + "epoch": 0.9457373937443262, + "grad_norm": 15.389444568356373, + "learning_rate": 2.3106644283313994e-08, + "loss": 0.497, + "step": 22919 + }, + { + "epoch": 0.9457786580836841, + "grad_norm": 6.928280921624014, + "learning_rate": 2.3071604794696554e-08, + "loss": 0.4491, + "step": 22920 + }, + { + "epoch": 0.945819922423042, + "grad_norm": 5.580052546342307, + "learning_rate": 2.3036591687714826e-08, + "loss": 0.5101, + "step": 22921 + }, + { + "epoch": 0.9458611867623999, + "grad_norm": 2.393675131877834, + "learning_rate": 2.3001604962994305e-08, + "loss": 0.5431, + "step": 22922 + }, + { + "epoch": 0.9459024511017579, + "grad_norm": 3.4740724911955505, + "learning_rate": 2.2966644621159994e-08, + "loss": 0.4624, + "step": 22923 + }, + { + "epoch": 0.9459437154411158, + "grad_norm": 6.0864706890411835, + "learning_rate": 2.2931710662836557e-08, + "loss": 0.5222, + "step": 22924 + }, + { + "epoch": 0.9459849797804737, + "grad_norm": 2.950214284314569, + "learning_rate": 2.2896803088647666e-08, + "loss": 0.521, + "step": 22925 + }, + { + "epoch": 0.9460262441198316, + "grad_norm": 2.5460346948122647, + "learning_rate": 2.2861921899217153e-08, + "loss": 0.4511, + "step": 22926 + }, + { + "epoch": 0.9460675084591895, + "grad_norm": 2.423146825774206, + "learning_rate": 
2.2827067095167852e-08, + "loss": 0.5457, + "step": 22927 + }, + { + "epoch": 0.9461087727985475, + "grad_norm": 12.847765192348566, + "learning_rate": 2.27922386771226e-08, + "loss": 0.5111, + "step": 22928 + }, + { + "epoch": 0.9461500371379055, + "grad_norm": 4.254610725379982, + "learning_rate": 2.2757436645703234e-08, + "loss": 0.4665, + "step": 22929 + }, + { + "epoch": 0.9461913014772634, + "grad_norm": 9.753227597752236, + "learning_rate": 2.272266100153142e-08, + "loss": 0.5326, + "step": 22930 + }, + { + "epoch": 0.9462325658166213, + "grad_norm": 2.926367470265388, + "learning_rate": 2.2687911745228662e-08, + "loss": 0.4729, + "step": 22931 + }, + { + "epoch": 0.9462738301559792, + "grad_norm": 6.1139477063633585, + "learning_rate": 2.2653188877415465e-08, + "loss": 0.487, + "step": 22932 + }, + { + "epoch": 0.9463150944953371, + "grad_norm": 2.608554299693871, + "learning_rate": 2.2618492398712e-08, + "loss": 0.5354, + "step": 22933 + }, + { + "epoch": 0.946356358834695, + "grad_norm": 2.9750260769509635, + "learning_rate": 2.25838223097381e-08, + "loss": 0.4668, + "step": 22934 + }, + { + "epoch": 0.946397623174053, + "grad_norm": 2.820128325197483, + "learning_rate": 2.2549178611112943e-08, + "loss": 0.548, + "step": 22935 + }, + { + "epoch": 0.9464388875134109, + "grad_norm": 2.4993782565452825, + "learning_rate": 2.2514561303455693e-08, + "loss": 0.5091, + "step": 22936 + }, + { + "epoch": 0.9464801518527688, + "grad_norm": 3.525165235688651, + "learning_rate": 2.2479970387384365e-08, + "loss": 0.5644, + "step": 22937 + }, + { + "epoch": 0.9465214161921268, + "grad_norm": 10.604891172089703, + "learning_rate": 2.2445405863516953e-08, + "loss": 0.5589, + "step": 22938 + }, + { + "epoch": 0.9465626805314847, + "grad_norm": 3.210347290201476, + "learning_rate": 2.2410867732470807e-08, + "loss": 0.4976, + "step": 22939 + }, + { + "epoch": 0.9466039448708427, + "grad_norm": 2.980140782746058, + "learning_rate": 2.2376355994863095e-08, + "loss": 0.5175, + "step": 22940 + }, + { + "epoch": 0.9466452092102006, + "grad_norm": 2.7123620790924234, + "learning_rate": 2.234187065131016e-08, + "loss": 0.5097, + "step": 22941 + }, + { + "epoch": 0.9466864735495585, + "grad_norm": 5.7575295254286045, + "learning_rate": 2.2307411702427838e-08, + "loss": 0.5092, + "step": 22942 + }, + { + "epoch": 0.9467277378889164, + "grad_norm": 2.871988597085248, + "learning_rate": 2.227297914883164e-08, + "loss": 0.4703, + "step": 22943 + }, + { + "epoch": 0.9467690022282743, + "grad_norm": 2.9212150489508906, + "learning_rate": 2.2238572991136906e-08, + "loss": 0.4681, + "step": 22944 + }, + { + "epoch": 0.9468102665676322, + "grad_norm": 3.476163484724671, + "learning_rate": 2.220419322995798e-08, + "loss": 0.4984, + "step": 22945 + }, + { + "epoch": 0.9468515309069901, + "grad_norm": 9.488959448852356, + "learning_rate": 2.2169839865909038e-08, + "loss": 0.5461, + "step": 22946 + }, + { + "epoch": 0.9468927952463481, + "grad_norm": 3.9643819686249064, + "learning_rate": 2.213551289960375e-08, + "loss": 0.5336, + "step": 22947 + }, + { + "epoch": 0.946934059585706, + "grad_norm": 2.265387780788884, + "learning_rate": 2.2101212331655296e-08, + "loss": 0.4833, + "step": 22948 + }, + { + "epoch": 0.946975323925064, + "grad_norm": 1.9750975024522512, + "learning_rate": 2.2066938162676187e-08, + "loss": 0.4813, + "step": 22949 + }, + { + "epoch": 0.9470165882644219, + "grad_norm": 2.9104288582153797, + "learning_rate": 2.2032690393278933e-08, + "loss": 0.4974, + "step": 22950 + }, + { + "epoch": 
0.9470578526037798, + "grad_norm": 23.266018949813628, + "learning_rate": 2.1998469024075042e-08, + "loss": 0.4784, + "step": 22951 + }, + { + "epoch": 0.9470991169431378, + "grad_norm": 4.0512630320777285, + "learning_rate": 2.1964274055676027e-08, + "loss": 0.4567, + "step": 22952 + }, + { + "epoch": 0.9471403812824957, + "grad_norm": 3.1173767627175395, + "learning_rate": 2.19301054886924e-08, + "loss": 0.4577, + "step": 22953 + }, + { + "epoch": 0.9471816456218536, + "grad_norm": 3.199036384601682, + "learning_rate": 2.189596332373467e-08, + "loss": 0.4875, + "step": 22954 + }, + { + "epoch": 0.9472229099612115, + "grad_norm": 3.4249172635306784, + "learning_rate": 2.1861847561412685e-08, + "loss": 0.4977, + "step": 22955 + }, + { + "epoch": 0.9472641743005694, + "grad_norm": 2.4795846414420315, + "learning_rate": 2.1827758202335956e-08, + "loss": 0.5405, + "step": 22956 + }, + { + "epoch": 0.9473054386399273, + "grad_norm": 4.006767939636905, + "learning_rate": 2.1793695247113166e-08, + "loss": 0.5167, + "step": 22957 + }, + { + "epoch": 0.9473467029792852, + "grad_norm": 7.186622891295582, + "learning_rate": 2.175965869635299e-08, + "loss": 0.5318, + "step": 22958 + }, + { + "epoch": 0.9473879673186433, + "grad_norm": 6.099920297704864, + "learning_rate": 2.1725648550663113e-08, + "loss": 0.4921, + "step": 22959 + }, + { + "epoch": 0.9474292316580012, + "grad_norm": 2.5250826407211355, + "learning_rate": 2.169166481065138e-08, + "loss": 0.5246, + "step": 22960 + }, + { + "epoch": 0.9474704959973591, + "grad_norm": 3.2289393253187613, + "learning_rate": 2.1657707476924638e-08, + "loss": 0.5116, + "step": 22961 + }, + { + "epoch": 0.947511760336717, + "grad_norm": 3.2609674724442645, + "learning_rate": 2.16237765500894e-08, + "loss": 0.485, + "step": 22962 + }, + { + "epoch": 0.9475530246760749, + "grad_norm": 11.468892136410519, + "learning_rate": 2.1589872030752023e-08, + "loss": 0.4955, + "step": 22963 + }, + { + "epoch": 0.9475942890154329, + "grad_norm": 7.81450326443325, + "learning_rate": 2.1555993919517847e-08, + "loss": 0.4712, + "step": 22964 + }, + { + "epoch": 0.9476355533547908, + "grad_norm": 3.338399365919989, + "learning_rate": 2.1522142216992058e-08, + "loss": 0.4802, + "step": 22965 + }, + { + "epoch": 0.9476768176941487, + "grad_norm": 1.9903105776932268, + "learning_rate": 2.1488316923779673e-08, + "loss": 0.474, + "step": 22966 + }, + { + "epoch": 0.9477180820335066, + "grad_norm": 3.1996229790759294, + "learning_rate": 2.145451804048437e-08, + "loss": 0.5006, + "step": 22967 + }, + { + "epoch": 0.9477593463728645, + "grad_norm": 3.670155778581536, + "learning_rate": 2.142074556771001e-08, + "loss": 0.5578, + "step": 22968 + }, + { + "epoch": 0.9478006107122225, + "grad_norm": 2.117127430076202, + "learning_rate": 2.138699950606027e-08, + "loss": 0.4624, + "step": 22969 + }, + { + "epoch": 0.9478418750515805, + "grad_norm": 11.212276020450794, + "learning_rate": 2.1353279856137498e-08, + "loss": 0.4989, + "step": 22970 + }, + { + "epoch": 0.9478831393909384, + "grad_norm": 3.0233533028677093, + "learning_rate": 2.131958661854405e-08, + "loss": 0.526, + "step": 22971 + }, + { + "epoch": 0.9479244037302963, + "grad_norm": 6.555129120663698, + "learning_rate": 2.1285919793881948e-08, + "loss": 0.4888, + "step": 22972 + }, + { + "epoch": 0.9479656680696542, + "grad_norm": 4.00740690878883, + "learning_rate": 2.1252279382752706e-08, + "loss": 0.5154, + "step": 22973 + }, + { + "epoch": 0.9480069324090121, + "grad_norm": 5.949178825366067, + "learning_rate": 
2.1218665385756842e-08, + "loss": 0.4795, + "step": 22974 + }, + { + "epoch": 0.94804819674837, + "grad_norm": 10.827804375477633, + "learning_rate": 2.118507780349488e-08, + "loss": 0.5148, + "step": 22975 + }, + { + "epoch": 0.948089461087728, + "grad_norm": 2.416388769149692, + "learning_rate": 2.1151516636566837e-08, + "loss": 0.5633, + "step": 22976 + }, + { + "epoch": 0.9481307254270859, + "grad_norm": 3.3022622805120565, + "learning_rate": 2.1117981885572402e-08, + "loss": 0.4891, + "step": 22977 + }, + { + "epoch": 0.9481719897664438, + "grad_norm": 6.836964015916818, + "learning_rate": 2.108447355111026e-08, + "loss": 0.5242, + "step": 22978 + }, + { + "epoch": 0.9482132541058018, + "grad_norm": 2.061534534833206, + "learning_rate": 2.1050991633779272e-08, + "loss": 0.5311, + "step": 22979 + }, + { + "epoch": 0.9482545184451597, + "grad_norm": 2.437018972741492, + "learning_rate": 2.1017536134177286e-08, + "loss": 0.4969, + "step": 22980 + }, + { + "epoch": 0.9482957827845176, + "grad_norm": 17.88738484196193, + "learning_rate": 2.0984107052901825e-08, + "loss": 0.4444, + "step": 22981 + }, + { + "epoch": 0.9483370471238756, + "grad_norm": 3.554942377915718, + "learning_rate": 2.0950704390550248e-08, + "loss": 0.4694, + "step": 22982 + }, + { + "epoch": 0.9483783114632335, + "grad_norm": 2.3047118039460215, + "learning_rate": 2.0917328147719072e-08, + "loss": 0.4952, + "step": 22983 + }, + { + "epoch": 0.9484195758025914, + "grad_norm": 4.586653491047639, + "learning_rate": 2.0883978325004494e-08, + "loss": 0.5207, + "step": 22984 + }, + { + "epoch": 0.9484608401419493, + "grad_norm": 3.8626425547959267, + "learning_rate": 2.08506549230022e-08, + "loss": 0.5081, + "step": 22985 + }, + { + "epoch": 0.9485021044813072, + "grad_norm": 3.5703880527286542, + "learning_rate": 2.0817357942307546e-08, + "loss": 0.5073, + "step": 22986 + }, + { + "epoch": 0.9485433688206651, + "grad_norm": 1.9610022442880792, + "learning_rate": 2.0784087383515228e-08, + "loss": 0.5198, + "step": 22987 + }, + { + "epoch": 0.9485846331600231, + "grad_norm": 3.5524039570519097, + "learning_rate": 2.075084324721943e-08, + "loss": 0.4603, + "step": 22988 + }, + { + "epoch": 0.9486258974993811, + "grad_norm": 3.787920380910409, + "learning_rate": 2.071762553401435e-08, + "loss": 0.5803, + "step": 22989 + }, + { + "epoch": 0.948667161838739, + "grad_norm": 5.327219112049381, + "learning_rate": 2.0684434244492845e-08, + "loss": 0.5053, + "step": 22990 + }, + { + "epoch": 0.9487084261780969, + "grad_norm": 3.394637074507765, + "learning_rate": 2.0651269379248107e-08, + "loss": 0.4938, + "step": 22991 + }, + { + "epoch": 0.9487496905174548, + "grad_norm": 2.3524233589756975, + "learning_rate": 2.0618130938872493e-08, + "loss": 0.5081, + "step": 22992 + }, + { + "epoch": 0.9487909548568128, + "grad_norm": 2.601899720853969, + "learning_rate": 2.0585018923957867e-08, + "loss": 0.4697, + "step": 22993 + }, + { + "epoch": 0.9488322191961707, + "grad_norm": 3.4700446215157457, + "learning_rate": 2.0551933335095753e-08, + "loss": 0.5395, + "step": 22994 + }, + { + "epoch": 0.9488734835355286, + "grad_norm": 2.8282567572969417, + "learning_rate": 2.0518874172877012e-08, + "loss": 0.5008, + "step": 22995 + }, + { + "epoch": 0.9489147478748865, + "grad_norm": 8.588545287404683, + "learning_rate": 2.0485841437892338e-08, + "loss": 0.5166, + "step": 22996 + }, + { + "epoch": 0.9489560122142444, + "grad_norm": 3.1452118839765255, + "learning_rate": 2.045283513073176e-08, + "loss": 0.4491, + "step": 22997 + }, + { + "epoch": 
0.9489972765536023, + "grad_norm": 2.6577724028921095, + "learning_rate": 2.0419855251984808e-08, + "loss": 0.5559, + "step": 22998 + }, + { + "epoch": 0.9490385408929604, + "grad_norm": 3.0075064790808788, + "learning_rate": 2.038690180224051e-08, + "loss": 0.5246, + "step": 22999 + }, + { + "epoch": 0.9490798052323183, + "grad_norm": 2.8748657842605367, + "learning_rate": 2.035397478208756e-08, + "loss": 0.5102, + "step": 23000 + }, + { + "epoch": 0.9491210695716762, + "grad_norm": 2.269964941400488, + "learning_rate": 2.0321074192114154e-08, + "loss": 0.5168, + "step": 23001 + }, + { + "epoch": 0.9491623339110341, + "grad_norm": 3.61787541649731, + "learning_rate": 2.0288200032907988e-08, + "loss": 0.484, + "step": 23002 + }, + { + "epoch": 0.949203598250392, + "grad_norm": 2.2649923826802345, + "learning_rate": 2.0255352305056264e-08, + "loss": 0.5289, + "step": 23003 + }, + { + "epoch": 0.9492448625897499, + "grad_norm": 2.683212518617289, + "learning_rate": 2.0222531009145674e-08, + "loss": 0.48, + "step": 23004 + }, + { + "epoch": 0.9492861269291079, + "grad_norm": 4.5086560100581625, + "learning_rate": 2.0189736145762584e-08, + "loss": 0.5799, + "step": 23005 + }, + { + "epoch": 0.9493273912684658, + "grad_norm": 3.9104716124952663, + "learning_rate": 2.015696771549269e-08, + "loss": 0.5312, + "step": 23006 + }, + { + "epoch": 0.9493686556078237, + "grad_norm": 2.8928153607299576, + "learning_rate": 2.0124225718921362e-08, + "loss": 0.5238, + "step": 23007 + }, + { + "epoch": 0.9494099199471816, + "grad_norm": 2.1429560332773403, + "learning_rate": 2.0091510156633298e-08, + "loss": 0.4816, + "step": 23008 + }, + { + "epoch": 0.9494511842865395, + "grad_norm": 12.018692181238974, + "learning_rate": 2.0058821029213025e-08, + "loss": 0.5321, + "step": 23009 + }, + { + "epoch": 0.9494924486258975, + "grad_norm": 2.6402280158193054, + "learning_rate": 2.002615833724475e-08, + "loss": 0.5097, + "step": 23010 + }, + { + "epoch": 0.9495337129652555, + "grad_norm": 2.6184001078381667, + "learning_rate": 1.999352208131133e-08, + "loss": 0.4672, + "step": 23011 + }, + { + "epoch": 0.9495749773046134, + "grad_norm": 2.299218256004033, + "learning_rate": 1.9960912261996146e-08, + "loss": 0.5313, + "step": 23012 + }, + { + "epoch": 0.9496162416439713, + "grad_norm": 2.9543287316802203, + "learning_rate": 1.9928328879881385e-08, + "loss": 0.5738, + "step": 23013 + }, + { + "epoch": 0.9496575059833292, + "grad_norm": 2.066223217664468, + "learning_rate": 1.9895771935549424e-08, + "loss": 0.5248, + "step": 23014 + }, + { + "epoch": 0.9496987703226871, + "grad_norm": 3.083586148240414, + "learning_rate": 1.9863241429581625e-08, + "loss": 0.4916, + "step": 23015 + }, + { + "epoch": 0.949740034662045, + "grad_norm": 5.399639596933253, + "learning_rate": 1.9830737362559026e-08, + "loss": 0.5259, + "step": 23016 + }, + { + "epoch": 0.949781299001403, + "grad_norm": 1.8972516161714146, + "learning_rate": 1.9798259735062163e-08, + "loss": 0.489, + "step": 23017 + }, + { + "epoch": 0.9498225633407609, + "grad_norm": 1.923564859446371, + "learning_rate": 1.976580854767157e-08, + "loss": 0.4925, + "step": 23018 + }, + { + "epoch": 0.9498638276801188, + "grad_norm": 2.1840607617404624, + "learning_rate": 1.9733383800966453e-08, + "loss": 0.5552, + "step": 23019 + }, + { + "epoch": 0.9499050920194768, + "grad_norm": 5.0828912534606605, + "learning_rate": 1.970098549552618e-08, + "loss": 0.5606, + "step": 23020 + }, + { + "epoch": 0.9499463563588347, + "grad_norm": 3.7650583492981116, + "learning_rate": 
1.966861363192929e-08, + "loss": 0.5159, + "step": 23021 + }, + { + "epoch": 0.9499876206981926, + "grad_norm": 3.5698899451872155, + "learning_rate": 1.9636268210754484e-08, + "loss": 0.4895, + "step": 23022 + }, + { + "epoch": 0.9500288850375506, + "grad_norm": 2.3342591098370202, + "learning_rate": 1.9603949232578967e-08, + "loss": 0.4497, + "step": 23023 + }, + { + "epoch": 0.9500701493769085, + "grad_norm": 2.405423215890499, + "learning_rate": 1.957165669798028e-08, + "loss": 0.5255, + "step": 23024 + }, + { + "epoch": 0.9501114137162664, + "grad_norm": 3.2841693184675282, + "learning_rate": 1.9539390607535456e-08, + "loss": 0.5276, + "step": 23025 + }, + { + "epoch": 0.9501526780556243, + "grad_norm": 5.887026513997106, + "learning_rate": 1.950715096182054e-08, + "loss": 0.504, + "step": 23026 + }, + { + "epoch": 0.9501939423949822, + "grad_norm": 3.049889173626785, + "learning_rate": 1.9474937761411572e-08, + "loss": 0.4652, + "step": 23027 + }, + { + "epoch": 0.9502352067343401, + "grad_norm": 3.5152809217854926, + "learning_rate": 1.944275100688392e-08, + "loss": 0.4926, + "step": 23028 + }, + { + "epoch": 0.9502764710736981, + "grad_norm": 7.178553479753827, + "learning_rate": 1.9410590698812458e-08, + "loss": 0.4937, + "step": 23029 + }, + { + "epoch": 0.9503177354130561, + "grad_norm": 2.428874011425344, + "learning_rate": 1.9378456837771896e-08, + "loss": 0.505, + "step": 23030 + }, + { + "epoch": 0.950358999752414, + "grad_norm": 4.658821662199498, + "learning_rate": 1.9346349424335775e-08, + "loss": 0.5393, + "step": 23031 + }, + { + "epoch": 0.9504002640917719, + "grad_norm": 2.108595550195085, + "learning_rate": 1.9314268459078132e-08, + "loss": 0.4896, + "step": 23032 + }, + { + "epoch": 0.9504415284311298, + "grad_norm": 3.706225101910133, + "learning_rate": 1.928221394257168e-08, + "loss": 0.4752, + "step": 23033 + }, + { + "epoch": 0.9504827927704877, + "grad_norm": 10.435528830780411, + "learning_rate": 1.925018587538896e-08, + "loss": 0.558, + "step": 23034 + }, + { + "epoch": 0.9505240571098457, + "grad_norm": 2.5805585592467515, + "learning_rate": 1.9218184258102512e-08, + "loss": 0.5086, + "step": 23035 + }, + { + "epoch": 0.9505653214492036, + "grad_norm": 8.563925126357162, + "learning_rate": 1.918620909128338e-08, + "loss": 0.4247, + "step": 23036 + }, + { + "epoch": 0.9506065857885615, + "grad_norm": 3.7367158041571704, + "learning_rate": 1.915426037550294e-08, + "loss": 0.529, + "step": 23037 + }, + { + "epoch": 0.9506478501279194, + "grad_norm": 21.935385241072023, + "learning_rate": 1.9122338111331906e-08, + "loss": 0.5227, + "step": 23038 + }, + { + "epoch": 0.9506891144672773, + "grad_norm": 3.222286476019881, + "learning_rate": 1.909044229934065e-08, + "loss": 0.5338, + "step": 23039 + }, + { + "epoch": 0.9507303788066354, + "grad_norm": 3.866230562714833, + "learning_rate": 1.905857294009872e-08, + "loss": 0.4726, + "step": 23040 + }, + { + "epoch": 0.9507716431459933, + "grad_norm": 3.597296651242423, + "learning_rate": 1.902673003417532e-08, + "loss": 0.4464, + "step": 23041 + }, + { + "epoch": 0.9508129074853512, + "grad_norm": 2.775837520593651, + "learning_rate": 1.8994913582139508e-08, + "loss": 0.5074, + "step": 23042 + }, + { + "epoch": 0.9508541718247091, + "grad_norm": 2.821514724993941, + "learning_rate": 1.896312358455948e-08, + "loss": 0.5333, + "step": 23043 + }, + { + "epoch": 0.950895436164067, + "grad_norm": 5.044163061632213, + "learning_rate": 1.89313600420028e-08, + "loss": 0.5168, + "step": 23044 + }, + { + "epoch": 
0.9509367005034249, + "grad_norm": 3.8189348819756033, + "learning_rate": 1.8899622955037333e-08, + "loss": 0.5333, + "step": 23045 + }, + { + "epoch": 0.9509779648427829, + "grad_norm": 6.069354998160971, + "learning_rate": 1.886791232422963e-08, + "loss": 0.4698, + "step": 23046 + }, + { + "epoch": 0.9510192291821408, + "grad_norm": 3.234327887241388, + "learning_rate": 1.8836228150146407e-08, + "loss": 0.4967, + "step": 23047 + }, + { + "epoch": 0.9510604935214987, + "grad_norm": 8.223093874602936, + "learning_rate": 1.8804570433353207e-08, + "loss": 0.4681, + "step": 23048 + }, + { + "epoch": 0.9511017578608566, + "grad_norm": 2.246176485637714, + "learning_rate": 1.8772939174415915e-08, + "loss": 0.4955, + "step": 23049 + }, + { + "epoch": 0.9511430222002146, + "grad_norm": 2.2980063147991174, + "learning_rate": 1.8741334373899245e-08, + "loss": 0.5233, + "step": 23050 + }, + { + "epoch": 0.9511842865395725, + "grad_norm": 7.1729086654906995, + "learning_rate": 1.8709756032368075e-08, + "loss": 0.4231, + "step": 23051 + }, + { + "epoch": 0.9512255508789305, + "grad_norm": 5.4326166975532395, + "learning_rate": 1.8678204150386123e-08, + "loss": 0.535, + "step": 23052 + }, + { + "epoch": 0.9512668152182884, + "grad_norm": 2.4447364624883225, + "learning_rate": 1.8646678728517273e-08, + "loss": 0.4706, + "step": 23053 + }, + { + "epoch": 0.9513080795576463, + "grad_norm": 2.641201048551467, + "learning_rate": 1.861517976732441e-08, + "loss": 0.5064, + "step": 23054 + }, + { + "epoch": 0.9513493438970042, + "grad_norm": 2.0890510202645793, + "learning_rate": 1.858370726737041e-08, + "loss": 0.4918, + "step": 23055 + }, + { + "epoch": 0.9513906082363621, + "grad_norm": 6.5413398916852525, + "learning_rate": 1.855226122921716e-08, + "loss": 0.5139, + "step": 23056 + }, + { + "epoch": 0.95143187257572, + "grad_norm": 4.087709704238964, + "learning_rate": 1.8520841653426712e-08, + "loss": 0.5582, + "step": 23057 + }, + { + "epoch": 0.951473136915078, + "grad_norm": 2.398443161959644, + "learning_rate": 1.8489448540560116e-08, + "loss": 0.4896, + "step": 23058 + }, + { + "epoch": 0.9515144012544359, + "grad_norm": 2.4962810465879497, + "learning_rate": 1.8458081891178093e-08, + "loss": 0.4958, + "step": 23059 + }, + { + "epoch": 0.9515556655937939, + "grad_norm": 4.064375892384432, + "learning_rate": 1.8426741705841022e-08, + "loss": 0.5435, + "step": 23060 + }, + { + "epoch": 0.9515969299331518, + "grad_norm": 2.614016673260458, + "learning_rate": 1.8395427985108626e-08, + "loss": 0.5415, + "step": 23061 + }, + { + "epoch": 0.9516381942725097, + "grad_norm": 2.3560135115898, + "learning_rate": 1.8364140729540124e-08, + "loss": 0.5209, + "step": 23062 + }, + { + "epoch": 0.9516794586118676, + "grad_norm": 2.206886123183092, + "learning_rate": 1.83328799396949e-08, + "loss": 0.5433, + "step": 23063 + }, + { + "epoch": 0.9517207229512256, + "grad_norm": 3.3261686296456556, + "learning_rate": 1.830164561613068e-08, + "loss": 0.5459, + "step": 23064 + }, + { + "epoch": 0.9517619872905835, + "grad_norm": 4.14442638596814, + "learning_rate": 1.8270437759405845e-08, + "loss": 0.557, + "step": 23065 + }, + { + "epoch": 0.9518032516299414, + "grad_norm": 2.3899060946214936, + "learning_rate": 1.8239256370077785e-08, + "loss": 0.5072, + "step": 23066 + }, + { + "epoch": 0.9518445159692993, + "grad_norm": 3.1061069575961393, + "learning_rate": 1.8208101448703385e-08, + "loss": 0.4916, + "step": 23067 + }, + { + "epoch": 0.9518857803086572, + "grad_norm": 2.3600995159271276, + "learning_rate": 
1.8176972995839036e-08, + "loss": 0.5411, + "step": 23068 + }, + { + "epoch": 0.9519270446480151, + "grad_norm": 2.480666549828047, + "learning_rate": 1.814587101204096e-08, + "loss": 0.5098, + "step": 23069 + }, + { + "epoch": 0.9519683089873731, + "grad_norm": 5.574398480705612, + "learning_rate": 1.8114795497864712e-08, + "loss": 0.5218, + "step": 23070 + }, + { + "epoch": 0.9520095733267311, + "grad_norm": 3.1576365632381096, + "learning_rate": 1.8083746453865347e-08, + "loss": 0.4356, + "step": 23071 + }, + { + "epoch": 0.952050837666089, + "grad_norm": 2.640780805471224, + "learning_rate": 1.8052723880597423e-08, + "loss": 0.4984, + "step": 23072 + }, + { + "epoch": 0.9520921020054469, + "grad_norm": 3.7251672497877704, + "learning_rate": 1.802172777861516e-08, + "loss": 0.5283, + "step": 23073 + }, + { + "epoch": 0.9521333663448048, + "grad_norm": 1.9834025493236875, + "learning_rate": 1.7990758148472118e-08, + "loss": 0.466, + "step": 23074 + }, + { + "epoch": 0.9521746306841627, + "grad_norm": 11.520411672921048, + "learning_rate": 1.7959814990721522e-08, + "loss": 0.478, + "step": 23075 + }, + { + "epoch": 0.9522158950235207, + "grad_norm": 13.609700521802543, + "learning_rate": 1.79288983059161e-08, + "loss": 0.5034, + "step": 23076 + }, + { + "epoch": 0.9522571593628786, + "grad_norm": 2.21660785977079, + "learning_rate": 1.7898008094608232e-08, + "loss": 0.4419, + "step": 23077 + }, + { + "epoch": 0.9522984237022365, + "grad_norm": 2.8738622564553475, + "learning_rate": 1.7867144357349486e-08, + "loss": 0.5247, + "step": 23078 + }, + { + "epoch": 0.9523396880415944, + "grad_norm": 4.153301981730175, + "learning_rate": 1.7836307094691418e-08, + "loss": 0.5421, + "step": 23079 + }, + { + "epoch": 0.9523809523809523, + "grad_norm": 3.257871784198484, + "learning_rate": 1.7805496307184587e-08, + "loss": 0.5194, + "step": 23080 + }, + { + "epoch": 0.9524222167203104, + "grad_norm": 2.241303615208164, + "learning_rate": 1.7774711995379557e-08, + "loss": 0.489, + "step": 23081 + }, + { + "epoch": 0.9524634810596683, + "grad_norm": 4.548152255464369, + "learning_rate": 1.7743954159826046e-08, + "loss": 0.5128, + "step": 23082 + }, + { + "epoch": 0.9525047453990262, + "grad_norm": 4.291342806536726, + "learning_rate": 1.7713222801073624e-08, + "loss": 0.5053, + "step": 23083 + }, + { + "epoch": 0.9525460097383841, + "grad_norm": 1.977721807542576, + "learning_rate": 1.7682517919671183e-08, + "loss": 0.5068, + "step": 23084 + }, + { + "epoch": 0.952587274077742, + "grad_norm": 4.194256194435842, + "learning_rate": 1.765183951616711e-08, + "loss": 0.5184, + "step": 23085 + }, + { + "epoch": 0.9526285384170999, + "grad_norm": 4.939624195267406, + "learning_rate": 1.762118759110948e-08, + "loss": 0.5223, + "step": 23086 + }, + { + "epoch": 0.9526698027564579, + "grad_norm": 2.1154277218094926, + "learning_rate": 1.7590562145045675e-08, + "loss": 0.5088, + "step": 23087 + }, + { + "epoch": 0.9527110670958158, + "grad_norm": 5.649305520684141, + "learning_rate": 1.755996317852293e-08, + "loss": 0.4619, + "step": 23088 + }, + { + "epoch": 0.9527523314351737, + "grad_norm": 1.9477806649831464, + "learning_rate": 1.752939069208781e-08, + "loss": 0.522, + "step": 23089 + }, + { + "epoch": 0.9527935957745316, + "grad_norm": 18.77509743599223, + "learning_rate": 1.7498844686286208e-08, + "loss": 0.4479, + "step": 23090 + }, + { + "epoch": 0.9528348601138896, + "grad_norm": 2.4379617620946914, + "learning_rate": 1.7468325161663857e-08, + "loss": 0.4844, + "step": 23091 + }, + { + "epoch": 
0.9528761244532475, + "grad_norm": 2.751804432192579, + "learning_rate": 1.7437832118766152e-08, + "loss": 0.4969, + "step": 23092 + }, + { + "epoch": 0.9529173887926055, + "grad_norm": 2.6725004814575124, + "learning_rate": 1.740736555813749e-08, + "loss": 0.4901, + "step": 23093 + }, + { + "epoch": 0.9529586531319634, + "grad_norm": 2.4376109548795832, + "learning_rate": 1.7376925480321937e-08, + "loss": 0.5012, + "step": 23094 + }, + { + "epoch": 0.9529999174713213, + "grad_norm": 2.8706964868135962, + "learning_rate": 1.7346511885863726e-08, + "loss": 0.4275, + "step": 23095 + }, + { + "epoch": 0.9530411818106792, + "grad_norm": 2.4162989953293383, + "learning_rate": 1.731612477530575e-08, + "loss": 0.4879, + "step": 23096 + }, + { + "epoch": 0.9530824461500371, + "grad_norm": 6.670601559976347, + "learning_rate": 1.7285764149190918e-08, + "loss": 0.5396, + "step": 23097 + }, + { + "epoch": 0.953123710489395, + "grad_norm": 3.512537928229883, + "learning_rate": 1.7255430008061622e-08, + "loss": 0.5632, + "step": 23098 + }, + { + "epoch": 0.953164974828753, + "grad_norm": 2.7418691593989664, + "learning_rate": 1.7225122352459432e-08, + "loss": 0.5217, + "step": 23099 + }, + { + "epoch": 0.9532062391681109, + "grad_norm": 2.2629399710044265, + "learning_rate": 1.7194841182925912e-08, + "loss": 0.5395, + "step": 23100 + }, + { + "epoch": 0.9532475035074689, + "grad_norm": 3.0226489776589505, + "learning_rate": 1.7164586500001966e-08, + "loss": 0.5374, + "step": 23101 + }, + { + "epoch": 0.9532887678468268, + "grad_norm": 3.516172215590773, + "learning_rate": 1.713435830422799e-08, + "loss": 0.5166, + "step": 23102 + }, + { + "epoch": 0.9533300321861847, + "grad_norm": 5.984553127413381, + "learning_rate": 1.7104156596143895e-08, + "loss": 0.5324, + "step": 23103 + }, + { + "epoch": 0.9533712965255426, + "grad_norm": 3.5158694599602187, + "learning_rate": 1.7073981376289237e-08, + "loss": 0.5044, + "step": 23104 + }, + { + "epoch": 0.9534125608649006, + "grad_norm": 2.484227409514465, + "learning_rate": 1.7043832645203094e-08, + "loss": 0.4671, + "step": 23105 + }, + { + "epoch": 0.9534538252042585, + "grad_norm": 3.1957881951043343, + "learning_rate": 1.70137104034237e-08, + "loss": 0.5273, + "step": 23106 + }, + { + "epoch": 0.9534950895436164, + "grad_norm": 2.95222813038921, + "learning_rate": 1.6983614651489288e-08, + "loss": 0.5217, + "step": 23107 + }, + { + "epoch": 0.9535363538829743, + "grad_norm": 10.758132458221551, + "learning_rate": 1.6953545389937763e-08, + "loss": 0.4362, + "step": 23108 + }, + { + "epoch": 0.9535776182223322, + "grad_norm": 4.949426905863817, + "learning_rate": 1.6923502619305697e-08, + "loss": 0.48, + "step": 23109 + }, + { + "epoch": 0.9536188825616901, + "grad_norm": 6.166684603333293, + "learning_rate": 1.6893486340129995e-08, + "loss": 0.509, + "step": 23110 + }, + { + "epoch": 0.9536601469010482, + "grad_norm": 1.8490697428751426, + "learning_rate": 1.686349655294672e-08, + "loss": 0.4958, + "step": 23111 + }, + { + "epoch": 0.9537014112404061, + "grad_norm": 8.401532287871913, + "learning_rate": 1.683353325829179e-08, + "loss": 0.4848, + "step": 23112 + }, + { + "epoch": 0.953742675579764, + "grad_norm": 2.3946047220488453, + "learning_rate": 1.6803596456700097e-08, + "loss": 0.4804, + "step": 23113 + }, + { + "epoch": 0.9537839399191219, + "grad_norm": 4.652599505875726, + "learning_rate": 1.6773686148706723e-08, + "loss": 0.5513, + "step": 23114 + }, + { + "epoch": 0.9538252042584798, + "grad_norm": 9.316294866636024, + "learning_rate": 
1.674380233484557e-08, + "loss": 0.5494, + "step": 23115 + }, + { + "epoch": 0.9538664685978377, + "grad_norm": 3.7407652665100097, + "learning_rate": 1.6713945015650878e-08, + "loss": 0.5294, + "step": 23116 + }, + { + "epoch": 0.9539077329371957, + "grad_norm": 3.868678401565494, + "learning_rate": 1.668411419165572e-08, + "loss": 0.5468, + "step": 23117 + }, + { + "epoch": 0.9539489972765536, + "grad_norm": 5.966474656637159, + "learning_rate": 1.6654309863392837e-08, + "loss": 0.5179, + "step": 23118 + }, + { + "epoch": 0.9539902616159115, + "grad_norm": 13.351988369808733, + "learning_rate": 1.66245320313948e-08, + "loss": 0.5001, + "step": 23119 + }, + { + "epoch": 0.9540315259552694, + "grad_norm": 3.7662267491361603, + "learning_rate": 1.6594780696193356e-08, + "loss": 0.517, + "step": 23120 + }, + { + "epoch": 0.9540727902946274, + "grad_norm": 9.088739589958557, + "learning_rate": 1.6565055858320244e-08, + "loss": 0.521, + "step": 23121 + }, + { + "epoch": 0.9541140546339854, + "grad_norm": 4.676444806129956, + "learning_rate": 1.6535357518306038e-08, + "loss": 0.5368, + "step": 23122 + }, + { + "epoch": 0.9541553189733433, + "grad_norm": 23.54486935505658, + "learning_rate": 1.650568567668148e-08, + "loss": 0.5365, + "step": 23123 + }, + { + "epoch": 0.9541965833127012, + "grad_norm": 3.7322261153645657, + "learning_rate": 1.6476040333976648e-08, + "loss": 0.524, + "step": 23124 + }, + { + "epoch": 0.9542378476520591, + "grad_norm": 2.715335897724769, + "learning_rate": 1.6446421490720788e-08, + "loss": 0.5115, + "step": 23125 + }, + { + "epoch": 0.954279111991417, + "grad_norm": 5.262649394139334, + "learning_rate": 1.6416829147443137e-08, + "loss": 0.5414, + "step": 23126 + }, + { + "epoch": 0.9543203763307749, + "grad_norm": 5.614293548082732, + "learning_rate": 1.6387263304672273e-08, + "loss": 0.4695, + "step": 23127 + }, + { + "epoch": 0.9543616406701328, + "grad_norm": 2.850888812000503, + "learning_rate": 1.6357723962936278e-08, + "loss": 0.4379, + "step": 23128 + }, + { + "epoch": 0.9544029050094908, + "grad_norm": 3.7498023473057978, + "learning_rate": 1.6328211122762894e-08, + "loss": 0.4703, + "step": 23129 + }, + { + "epoch": 0.9544441693488487, + "grad_norm": 2.730132340111177, + "learning_rate": 1.6298724784679196e-08, + "loss": 0.4772, + "step": 23130 + }, + { + "epoch": 0.9544854336882066, + "grad_norm": 2.453923105545385, + "learning_rate": 1.6269264949211938e-08, + "loss": 0.5468, + "step": 23131 + }, + { + "epoch": 0.9545266980275646, + "grad_norm": 5.226500785834015, + "learning_rate": 1.6239831616887357e-08, + "loss": 0.4632, + "step": 23132 + }, + { + "epoch": 0.9545679623669225, + "grad_norm": 5.02312348653488, + "learning_rate": 1.6210424788231202e-08, + "loss": 0.5249, + "step": 23133 + }, + { + "epoch": 0.9546092267062805, + "grad_norm": 3.66103860960639, + "learning_rate": 1.6181044463768725e-08, + "loss": 0.5201, + "step": 23134 + }, + { + "epoch": 0.9546504910456384, + "grad_norm": 2.887516998039362, + "learning_rate": 1.6151690644024665e-08, + "loss": 0.5562, + "step": 23135 + }, + { + "epoch": 0.9546917553849963, + "grad_norm": 4.600353216294008, + "learning_rate": 1.6122363329523603e-08, + "loss": 0.4685, + "step": 23136 + }, + { + "epoch": 0.9547330197243542, + "grad_norm": 3.014281637214124, + "learning_rate": 1.6093062520789126e-08, + "loss": 0.5596, + "step": 23137 + }, + { + "epoch": 0.9547742840637121, + "grad_norm": 6.725166043762518, + "learning_rate": 1.606378821834481e-08, + "loss": 0.5047, + "step": 23138 + }, + { + "epoch": 
0.95481554840307, + "grad_norm": 2.592277287893246, + "learning_rate": 1.6034540422713238e-08, + "loss": 0.4779, + "step": 23139 + }, + { + "epoch": 0.954856812742428, + "grad_norm": 3.039409401096358, + "learning_rate": 1.6005319134417163e-08, + "loss": 0.5169, + "step": 23140 + }, + { + "epoch": 0.9548980770817859, + "grad_norm": 25.219741111907382, + "learning_rate": 1.5976124353978662e-08, + "loss": 0.5142, + "step": 23141 + }, + { + "epoch": 0.9549393414211439, + "grad_norm": 8.484693161624719, + "learning_rate": 1.594695608191882e-08, + "loss": 0.4835, + "step": 23142 + }, + { + "epoch": 0.9549806057605018, + "grad_norm": 2.8387589593827607, + "learning_rate": 1.591781431875905e-08, + "loss": 0.5413, + "step": 23143 + }, + { + "epoch": 0.9550218700998597, + "grad_norm": 5.451541924421764, + "learning_rate": 1.5888699065019606e-08, + "loss": 0.5552, + "step": 23144 + }, + { + "epoch": 0.9550631344392176, + "grad_norm": 2.3564011257439113, + "learning_rate": 1.585961032122074e-08, + "loss": 0.4694, + "step": 23145 + }, + { + "epoch": 0.9551043987785756, + "grad_norm": 7.122104222223157, + "learning_rate": 1.5830548087881868e-08, + "loss": 0.508, + "step": 23146 + }, + { + "epoch": 0.9551456631179335, + "grad_norm": 3.79534438060534, + "learning_rate": 1.580151236552241e-08, + "loss": 0.4742, + "step": 23147 + }, + { + "epoch": 0.9551869274572914, + "grad_norm": 2.5382557655517757, + "learning_rate": 1.577250315466061e-08, + "loss": 0.5052, + "step": 23148 + }, + { + "epoch": 0.9552281917966493, + "grad_norm": 3.8924490841287924, + "learning_rate": 1.5743520455815065e-08, + "loss": 0.5411, + "step": 23149 + }, + { + "epoch": 0.9552694561360072, + "grad_norm": 2.1080819181100776, + "learning_rate": 1.571456426950335e-08, + "loss": 0.5089, + "step": 23150 + }, + { + "epoch": 0.9553107204753651, + "grad_norm": 3.1327311153111643, + "learning_rate": 1.568563459624239e-08, + "loss": 0.5479, + "step": 23151 + }, + { + "epoch": 0.9553519848147232, + "grad_norm": 10.870928901599104, + "learning_rate": 1.5656731436549276e-08, + "loss": 0.5363, + "step": 23152 + }, + { + "epoch": 0.9553932491540811, + "grad_norm": 30.155221086177416, + "learning_rate": 1.5627854790940254e-08, + "loss": 0.469, + "step": 23153 + }, + { + "epoch": 0.955434513493439, + "grad_norm": 3.0578360824759563, + "learning_rate": 1.5599004659931083e-08, + "loss": 0.5115, + "step": 23154 + }, + { + "epoch": 0.9554757778327969, + "grad_norm": 2.410996719840783, + "learning_rate": 1.5570181044037013e-08, + "loss": 0.4544, + "step": 23155 + }, + { + "epoch": 0.9555170421721548, + "grad_norm": 2.960248442363257, + "learning_rate": 1.554138394377297e-08, + "loss": 0.5423, + "step": 23156 + }, + { + "epoch": 0.9555583065115127, + "grad_norm": 3.934528831235235, + "learning_rate": 1.5512613359653373e-08, + "loss": 0.4857, + "step": 23157 + }, + { + "epoch": 0.9555995708508707, + "grad_norm": 2.2936687095635127, + "learning_rate": 1.548386929219231e-08, + "loss": 0.4869, + "step": 23158 + }, + { + "epoch": 0.9556408351902286, + "grad_norm": 12.904358945538274, + "learning_rate": 1.5455151741902705e-08, + "loss": 0.5181, + "step": 23159 + }, + { + "epoch": 0.9556820995295865, + "grad_norm": 6.134022543637652, + "learning_rate": 1.5426460709297985e-08, + "loss": 0.5169, + "step": 23160 + }, + { + "epoch": 0.9557233638689444, + "grad_norm": 7.959338928453832, + "learning_rate": 1.5397796194890735e-08, + "loss": 0.4923, + "step": 23161 + }, + { + "epoch": 0.9557646282083024, + "grad_norm": 5.485737672789276, + "learning_rate": 
1.5369158199192547e-08, + "loss": 0.5021, + "step": 23162 + }, + { + "epoch": 0.9558058925476604, + "grad_norm": 4.036478658828516, + "learning_rate": 1.5340546722715178e-08, + "loss": 0.5865, + "step": 23163 + }, + { + "epoch": 0.9558471568870183, + "grad_norm": 3.624449560113894, + "learning_rate": 1.5311961765969716e-08, + "loss": 0.5637, + "step": 23164 + }, + { + "epoch": 0.9558884212263762, + "grad_norm": 2.995510559508236, + "learning_rate": 1.5283403329466595e-08, + "loss": 0.5927, + "step": 23165 + }, + { + "epoch": 0.9559296855657341, + "grad_norm": 4.009050058724706, + "learning_rate": 1.5254871413716397e-08, + "loss": 0.5011, + "step": 23166 + }, + { + "epoch": 0.955970949905092, + "grad_norm": 4.716549129192201, + "learning_rate": 1.522636601922822e-08, + "loss": 0.5274, + "step": 23167 + }, + { + "epoch": 0.9560122142444499, + "grad_norm": 1.9804845336014667, + "learning_rate": 1.5197887146511493e-08, + "loss": 0.5187, + "step": 23168 + }, + { + "epoch": 0.9560534785838078, + "grad_norm": 3.9093206590774576, + "learning_rate": 1.5169434796074966e-08, + "loss": 0.4926, + "step": 23169 + }, + { + "epoch": 0.9560947429231658, + "grad_norm": 3.1105292495991437, + "learning_rate": 1.514100896842674e-08, + "loss": 0.4794, + "step": 23170 + }, + { + "epoch": 0.9561360072625237, + "grad_norm": 2.1865608938144674, + "learning_rate": 1.511260966407474e-08, + "loss": 0.4958, + "step": 23171 + }, + { + "epoch": 0.9561772716018817, + "grad_norm": 4.691169746331666, + "learning_rate": 1.508423688352606e-08, + "loss": 0.5551, + "step": 23172 + }, + { + "epoch": 0.9562185359412396, + "grad_norm": 1.8422543621638285, + "learning_rate": 1.5055890627287627e-08, + "loss": 0.4541, + "step": 23173 + }, + { + "epoch": 0.9562598002805975, + "grad_norm": 2.319708006746668, + "learning_rate": 1.502757089586587e-08, + "loss": 0.5809, + "step": 23174 + }, + { + "epoch": 0.9563010646199555, + "grad_norm": 2.7259999403529878, + "learning_rate": 1.4999277689766388e-08, + "loss": 0.5648, + "step": 23175 + }, + { + "epoch": 0.9563423289593134, + "grad_norm": 3.063315389728632, + "learning_rate": 1.4971011009494772e-08, + "loss": 0.5058, + "step": 23176 + }, + { + "epoch": 0.9563835932986713, + "grad_norm": 3.8536907766296338, + "learning_rate": 1.4942770855555786e-08, + "loss": 0.4546, + "step": 23177 + }, + { + "epoch": 0.9564248576380292, + "grad_norm": 4.731834022875792, + "learning_rate": 1.491455722845403e-08, + "loss": 0.574, + "step": 23178 + }, + { + "epoch": 0.9564661219773871, + "grad_norm": 4.320692509403578, + "learning_rate": 1.4886370128693427e-08, + "loss": 0.5104, + "step": 23179 + }, + { + "epoch": 0.956507386316745, + "grad_norm": 5.975331202873923, + "learning_rate": 1.4858209556777413e-08, + "loss": 0.4965, + "step": 23180 + }, + { + "epoch": 0.956548650656103, + "grad_norm": 8.328275078444156, + "learning_rate": 1.4830075513208918e-08, + "loss": 0.5393, + "step": 23181 + }, + { + "epoch": 0.956589914995461, + "grad_norm": 4.5235907604287915, + "learning_rate": 1.4801967998490707e-08, + "loss": 0.5918, + "step": 23182 + }, + { + "epoch": 0.9566311793348189, + "grad_norm": 2.028028656392331, + "learning_rate": 1.477388701312471e-08, + "loss": 0.487, + "step": 23183 + }, + { + "epoch": 0.9566724436741768, + "grad_norm": 2.823810264228282, + "learning_rate": 1.4745832557612693e-08, + "loss": 0.52, + "step": 23184 + }, + { + "epoch": 0.9567137080135347, + "grad_norm": 5.339148435924647, + "learning_rate": 1.4717804632455423e-08, + "loss": 0.4748, + "step": 23185 + }, + { + "epoch": 
0.9567549723528926, + "grad_norm": 6.228226680415253, + "learning_rate": 1.4689803238153998e-08, + "loss": 0.5801, + "step": 23186 + }, + { + "epoch": 0.9567962366922506, + "grad_norm": 2.590465975815702, + "learning_rate": 1.4661828375208187e-08, + "loss": 0.5327, + "step": 23187 + }, + { + "epoch": 0.9568375010316085, + "grad_norm": 4.407878699309924, + "learning_rate": 1.463388004411792e-08, + "loss": 0.5626, + "step": 23188 + }, + { + "epoch": 0.9568787653709664, + "grad_norm": 5.041040089324075, + "learning_rate": 1.4605958245382467e-08, + "loss": 0.4905, + "step": 23189 + }, + { + "epoch": 0.9569200297103243, + "grad_norm": 1.934666234080958, + "learning_rate": 1.4578062979500263e-08, + "loss": 0.5119, + "step": 23190 + }, + { + "epoch": 0.9569612940496822, + "grad_norm": 3.1899085438940507, + "learning_rate": 1.4550194246970072e-08, + "loss": 0.4745, + "step": 23191 + }, + { + "epoch": 0.9570025583890401, + "grad_norm": 3.991513629676071, + "learning_rate": 1.4522352048289167e-08, + "loss": 0.5399, + "step": 23192 + }, + { + "epoch": 0.9570438227283982, + "grad_norm": 5.196603496354093, + "learning_rate": 1.4494536383955315e-08, + "loss": 0.5798, + "step": 23193 + }, + { + "epoch": 0.9570850870677561, + "grad_norm": 1.984340574492392, + "learning_rate": 1.4466747254465119e-08, + "loss": 0.467, + "step": 23194 + }, + { + "epoch": 0.957126351407114, + "grad_norm": 2.226023767774604, + "learning_rate": 1.4438984660315179e-08, + "loss": 0.4863, + "step": 23195 + }, + { + "epoch": 0.9571676157464719, + "grad_norm": 6.569962467988115, + "learning_rate": 1.441124860200127e-08, + "loss": 0.5208, + "step": 23196 + }, + { + "epoch": 0.9572088800858298, + "grad_norm": 2.4798256851622242, + "learning_rate": 1.4383539080018826e-08, + "loss": 0.4782, + "step": 23197 + }, + { + "epoch": 0.9572501444251877, + "grad_norm": 2.2718228738449064, + "learning_rate": 1.4355856094862618e-08, + "loss": 0.5394, + "step": 23198 + }, + { + "epoch": 0.9572914087645457, + "grad_norm": 3.7545981068479546, + "learning_rate": 1.4328199647027584e-08, + "loss": 0.5165, + "step": 23199 + }, + { + "epoch": 0.9573326731039036, + "grad_norm": 11.499265038518544, + "learning_rate": 1.430056973700733e-08, + "loss": 0.5224, + "step": 23200 + }, + { + "epoch": 0.9573739374432615, + "grad_norm": 8.738592180191539, + "learning_rate": 1.4272966365295625e-08, + "loss": 0.5388, + "step": 23201 + }, + { + "epoch": 0.9574152017826194, + "grad_norm": 4.229321948958505, + "learning_rate": 1.4245389532385578e-08, + "loss": 0.5158, + "step": 23202 + }, + { + "epoch": 0.9574564661219774, + "grad_norm": 3.3865874248114283, + "learning_rate": 1.4217839238769625e-08, + "loss": 0.507, + "step": 23203 + }, + { + "epoch": 0.9574977304613354, + "grad_norm": 4.435611949571968, + "learning_rate": 1.419031548493971e-08, + "loss": 0.5412, + "step": 23204 + }, + { + "epoch": 0.9575389948006933, + "grad_norm": 2.595830534813469, + "learning_rate": 1.4162818271387767e-08, + "loss": 0.4818, + "step": 23205 + }, + { + "epoch": 0.9575802591400512, + "grad_norm": 3.802099163657852, + "learning_rate": 1.4135347598604909e-08, + "loss": 0.5061, + "step": 23206 + }, + { + "epoch": 0.9576215234794091, + "grad_norm": 16.240108338116855, + "learning_rate": 1.4107903467081906e-08, + "loss": 0.5245, + "step": 23207 + }, + { + "epoch": 0.957662787818767, + "grad_norm": 13.48818230706772, + "learning_rate": 1.4080485877308702e-08, + "loss": 0.5082, + "step": 23208 + }, + { + "epoch": 0.9577040521581249, + "grad_norm": 6.397797166427704, + "learning_rate": 
1.4053094829775404e-08, + "loss": 0.4831, + "step": 23209 + }, + { + "epoch": 0.9577453164974828, + "grad_norm": 21.299142009526232, + "learning_rate": 1.4025730324970954e-08, + "loss": 0.4997, + "step": 23210 + }, + { + "epoch": 0.9577865808368408, + "grad_norm": 3.7583116644100856, + "learning_rate": 1.399839236338446e-08, + "loss": 0.502, + "step": 23211 + }, + { + "epoch": 0.9578278451761987, + "grad_norm": 2.155112221578987, + "learning_rate": 1.3971080945503866e-08, + "loss": 0.5456, + "step": 23212 + }, + { + "epoch": 0.9578691095155567, + "grad_norm": 2.2004686650870315, + "learning_rate": 1.3943796071817283e-08, + "loss": 0.4385, + "step": 23213 + }, + { + "epoch": 0.9579103738549146, + "grad_norm": 5.660526450985996, + "learning_rate": 1.3916537742812152e-08, + "loss": 0.4743, + "step": 23214 + }, + { + "epoch": 0.9579516381942725, + "grad_norm": 4.242735185228643, + "learning_rate": 1.388930595897525e-08, + "loss": 0.5081, + "step": 23215 + }, + { + "epoch": 0.9579929025336305, + "grad_norm": 3.7968447065192006, + "learning_rate": 1.386210072079319e-08, + "loss": 0.487, + "step": 23216 + }, + { + "epoch": 0.9580341668729884, + "grad_norm": 2.6788295727926803, + "learning_rate": 1.3834922028751418e-08, + "loss": 0.525, + "step": 23217 + }, + { + "epoch": 0.9580754312123463, + "grad_norm": 4.092755289167808, + "learning_rate": 1.3807769883335874e-08, + "loss": 0.5583, + "step": 23218 + }, + { + "epoch": 0.9581166955517042, + "grad_norm": 2.681294308380711, + "learning_rate": 1.3780644285031674e-08, + "loss": 0.4772, + "step": 23219 + }, + { + "epoch": 0.9581579598910621, + "grad_norm": 7.597498172092563, + "learning_rate": 1.3753545234322929e-08, + "loss": 0.4997, + "step": 23220 + }, + { + "epoch": 0.95819922423042, + "grad_norm": 2.8737496631549373, + "learning_rate": 1.3726472731693751e-08, + "loss": 0.5201, + "step": 23221 + }, + { + "epoch": 0.958240488569778, + "grad_norm": 7.0636299549050605, + "learning_rate": 1.3699426777627922e-08, + "loss": 0.5599, + "step": 23222 + }, + { + "epoch": 0.958281752909136, + "grad_norm": 5.507516778640473, + "learning_rate": 1.3672407372608719e-08, + "loss": 0.4902, + "step": 23223 + }, + { + "epoch": 0.9583230172484939, + "grad_norm": 2.787896207254357, + "learning_rate": 1.364541451711826e-08, + "loss": 0.5251, + "step": 23224 + }, + { + "epoch": 0.9583642815878518, + "grad_norm": 2.90133322235208, + "learning_rate": 1.3618448211639156e-08, + "loss": 0.5623, + "step": 23225 + }, + { + "epoch": 0.9584055459272097, + "grad_norm": 2.4506941289318616, + "learning_rate": 1.3591508456652856e-08, + "loss": 0.5002, + "step": 23226 + }, + { + "epoch": 0.9584468102665676, + "grad_norm": 2.91659237684545, + "learning_rate": 1.3564595252640644e-08, + "loss": 0.5601, + "step": 23227 + }, + { + "epoch": 0.9584880746059256, + "grad_norm": 2.6130389940227277, + "learning_rate": 1.3537708600083299e-08, + "loss": 0.5011, + "step": 23228 + }, + { + "epoch": 0.9585293389452835, + "grad_norm": 6.759287281656923, + "learning_rate": 1.3510848499461104e-08, + "loss": 0.5337, + "step": 23229 + }, + { + "epoch": 0.9585706032846414, + "grad_norm": 2.6332343589632363, + "learning_rate": 1.3484014951253676e-08, + "loss": 0.4352, + "step": 23230 + }, + { + "epoch": 0.9586118676239993, + "grad_norm": 2.658055541899233, + "learning_rate": 1.3457207955940299e-08, + "loss": 0.4739, + "step": 23231 + }, + { + "epoch": 0.9586531319633572, + "grad_norm": 2.5904201572375443, + "learning_rate": 1.3430427514000254e-08, + "loss": 0.4838, + "step": 23232 + }, + { + "epoch": 
0.9586943963027152, + "grad_norm": 4.02963186329755, + "learning_rate": 1.3403673625911327e-08, + "loss": 0.5088, + "step": 23233 + }, + { + "epoch": 0.9587356606420732, + "grad_norm": 5.926026059880553, + "learning_rate": 1.3376946292151804e-08, + "loss": 0.5221, + "step": 23234 + }, + { + "epoch": 0.9587769249814311, + "grad_norm": 5.906279705213683, + "learning_rate": 1.3350245513198966e-08, + "loss": 0.4513, + "step": 23235 + }, + { + "epoch": 0.958818189320789, + "grad_norm": 8.141396374798296, + "learning_rate": 1.3323571289529768e-08, + "loss": 0.5286, + "step": 23236 + }, + { + "epoch": 0.9588594536601469, + "grad_norm": 3.1832198588292333, + "learning_rate": 1.329692362162066e-08, + "loss": 0.4943, + "step": 23237 + }, + { + "epoch": 0.9589007179995048, + "grad_norm": 2.551028608574148, + "learning_rate": 1.3270302509947596e-08, + "loss": 0.4998, + "step": 23238 + }, + { + "epoch": 0.9589419823388627, + "grad_norm": 2.352260457986602, + "learning_rate": 1.3243707954986362e-08, + "loss": 0.5246, + "step": 23239 + }, + { + "epoch": 0.9589832466782207, + "grad_norm": 3.502468593782747, + "learning_rate": 1.3217139957211744e-08, + "loss": 0.4771, + "step": 23240 + }, + { + "epoch": 0.9590245110175786, + "grad_norm": 2.93852509345436, + "learning_rate": 1.3190598517098195e-08, + "loss": 0.4906, + "step": 23241 + }, + { + "epoch": 0.9590657753569365, + "grad_norm": 4.017838160231643, + "learning_rate": 1.3164083635120173e-08, + "loss": 0.5202, + "step": 23242 + }, + { + "epoch": 0.9591070396962945, + "grad_norm": 3.877778217978738, + "learning_rate": 1.313759531175096e-08, + "loss": 0.4491, + "step": 23243 + }, + { + "epoch": 0.9591483040356524, + "grad_norm": 24.89127297604924, + "learning_rate": 1.3111133547464016e-08, + "loss": 0.4758, + "step": 23244 + }, + { + "epoch": 0.9591895683750103, + "grad_norm": 2.7943620744714868, + "learning_rate": 1.3084698342731794e-08, + "loss": 0.5036, + "step": 23245 + }, + { + "epoch": 0.9592308327143683, + "grad_norm": 3.920301504358168, + "learning_rate": 1.3058289698026415e-08, + "loss": 0.5337, + "step": 23246 + }, + { + "epoch": 0.9592720970537262, + "grad_norm": 3.3268395596073406, + "learning_rate": 1.3031907613819838e-08, + "loss": 0.5198, + "step": 23247 + }, + { + "epoch": 0.9593133613930841, + "grad_norm": 4.219733619921309, + "learning_rate": 1.300555209058335e-08, + "loss": 0.4918, + "step": 23248 + }, + { + "epoch": 0.959354625732442, + "grad_norm": 4.923856702564474, + "learning_rate": 1.2979223128787409e-08, + "loss": 0.4873, + "step": 23249 + }, + { + "epoch": 0.9593958900717999, + "grad_norm": 15.687806498719674, + "learning_rate": 1.2952920728902473e-08, + "loss": 0.488, + "step": 23250 + }, + { + "epoch": 0.9594371544111578, + "grad_norm": 2.5414028566766036, + "learning_rate": 1.2926644891398498e-08, + "loss": 0.5143, + "step": 23251 + }, + { + "epoch": 0.9594784187505158, + "grad_norm": 2.4656525420266133, + "learning_rate": 1.2900395616744776e-08, + "loss": 0.5682, + "step": 23252 + }, + { + "epoch": 0.9595196830898737, + "grad_norm": 1.7160933189628083, + "learning_rate": 1.2874172905410098e-08, + "loss": 0.4542, + "step": 23253 + }, + { + "epoch": 0.9595609474292317, + "grad_norm": 4.6572120439563935, + "learning_rate": 1.2847976757862756e-08, + "loss": 0.5306, + "step": 23254 + }, + { + "epoch": 0.9596022117685896, + "grad_norm": 2.2690704421699985, + "learning_rate": 1.2821807174571042e-08, + "loss": 0.4701, + "step": 23255 + }, + { + "epoch": 0.9596434761079475, + "grad_norm": 3.0049368981210987, + "learning_rate": 
1.2795664156002251e-08, + "loss": 0.5749, + "step": 23256 + }, + { + "epoch": 0.9596847404473055, + "grad_norm": 6.737074357689753, + "learning_rate": 1.2769547702623174e-08, + "loss": 0.5022, + "step": 23257 + }, + { + "epoch": 0.9597260047866634, + "grad_norm": 5.587179390146276, + "learning_rate": 1.2743457814900439e-08, + "loss": 0.5606, + "step": 23258 + }, + { + "epoch": 0.9597672691260213, + "grad_norm": 2.4175169496731033, + "learning_rate": 1.2717394493300172e-08, + "loss": 0.4917, + "step": 23259 + }, + { + "epoch": 0.9598085334653792, + "grad_norm": 3.2605265446557783, + "learning_rate": 1.2691357738287834e-08, + "loss": 0.5137, + "step": 23260 + }, + { + "epoch": 0.9598497978047371, + "grad_norm": 2.0762764844616317, + "learning_rate": 1.2665347550328388e-08, + "loss": 0.5036, + "step": 23261 + }, + { + "epoch": 0.959891062144095, + "grad_norm": 3.4857641901782563, + "learning_rate": 1.2639363929886626e-08, + "loss": 0.4989, + "step": 23262 + }, + { + "epoch": 0.959932326483453, + "grad_norm": 2.611142808984061, + "learning_rate": 1.261340687742668e-08, + "loss": 0.5154, + "step": 23263 + }, + { + "epoch": 0.959973590822811, + "grad_norm": 5.676642108307349, + "learning_rate": 1.2587476393412345e-08, + "loss": 0.5423, + "step": 23264 + }, + { + "epoch": 0.9600148551621689, + "grad_norm": 2.2147672411284014, + "learning_rate": 1.2561572478306415e-08, + "loss": 0.4342, + "step": 23265 + }, + { + "epoch": 0.9600561195015268, + "grad_norm": 2.9696211354319457, + "learning_rate": 1.253569513257169e-08, + "loss": 0.4745, + "step": 23266 + }, + { + "epoch": 0.9600973838408847, + "grad_norm": 2.6085988059377994, + "learning_rate": 1.250984435667063e-08, + "loss": 0.5116, + "step": 23267 + }, + { + "epoch": 0.9601386481802426, + "grad_norm": 2.6612130332006743, + "learning_rate": 1.2484020151065035e-08, + "loss": 0.5591, + "step": 23268 + }, + { + "epoch": 0.9601799125196006, + "grad_norm": 2.3407787550856205, + "learning_rate": 1.2458222516215866e-08, + "loss": 0.4872, + "step": 23269 + }, + { + "epoch": 0.9602211768589585, + "grad_norm": 2.8408415551618433, + "learning_rate": 1.2432451452584259e-08, + "loss": 0.525, + "step": 23270 + }, + { + "epoch": 0.9602624411983164, + "grad_norm": 3.632831000473081, + "learning_rate": 1.2406706960630176e-08, + "loss": 0.5351, + "step": 23271 + }, + { + "epoch": 0.9603037055376743, + "grad_norm": 5.610061469403799, + "learning_rate": 1.238098904081375e-08, + "loss": 0.4999, + "step": 23272 + }, + { + "epoch": 0.9603449698770322, + "grad_norm": 2.510383789413377, + "learning_rate": 1.2355297693594447e-08, + "loss": 0.4775, + "step": 23273 + }, + { + "epoch": 0.9603862342163902, + "grad_norm": 3.162250387814202, + "learning_rate": 1.23296329194309e-08, + "loss": 0.5204, + "step": 23274 + }, + { + "epoch": 0.9604274985557482, + "grad_norm": 2.505551292513325, + "learning_rate": 1.2303994718781742e-08, + "loss": 0.4976, + "step": 23275 + }, + { + "epoch": 0.9604687628951061, + "grad_norm": 2.193560654100165, + "learning_rate": 1.2278383092104772e-08, + "loss": 0.5291, + "step": 23276 + }, + { + "epoch": 0.960510027234464, + "grad_norm": 2.3108099557016866, + "learning_rate": 1.2252798039857793e-08, + "loss": 0.5261, + "step": 23277 + }, + { + "epoch": 0.9605512915738219, + "grad_norm": 6.93955473706846, + "learning_rate": 1.2227239562497606e-08, + "loss": 0.4691, + "step": 23278 + }, + { + "epoch": 0.9605925559131798, + "grad_norm": 17.36054739934261, + "learning_rate": 1.220170766048051e-08, + "loss": 0.5263, + "step": 23279 + }, + { + "epoch": 
0.9606338202525377, + "grad_norm": 2.4900567495958454, + "learning_rate": 1.2176202334263142e-08, + "loss": 0.4685, + "step": 23280 + }, + { + "epoch": 0.9606750845918957, + "grad_norm": 3.268093625786096, + "learning_rate": 1.2150723584300638e-08, + "loss": 0.5236, + "step": 23281 + }, + { + "epoch": 0.9607163489312536, + "grad_norm": 2.8382441771789866, + "learning_rate": 1.2125271411048133e-08, + "loss": 0.5052, + "step": 23282 + }, + { + "epoch": 0.9607576132706115, + "grad_norm": 3.1825590141646125, + "learning_rate": 1.2099845814960431e-08, + "loss": 0.472, + "step": 23283 + }, + { + "epoch": 0.9607988776099695, + "grad_norm": 4.797011368474692, + "learning_rate": 1.2074446796491667e-08, + "loss": 0.5245, + "step": 23284 + }, + { + "epoch": 0.9608401419493274, + "grad_norm": 3.5537797800558906, + "learning_rate": 1.204907435609548e-08, + "loss": 0.5052, + "step": 23285 + }, + { + "epoch": 0.9608814062886853, + "grad_norm": 2.0474702783647047, + "learning_rate": 1.2023728494225173e-08, + "loss": 0.5083, + "step": 23286 + }, + { + "epoch": 0.9609226706280433, + "grad_norm": 5.013407397583883, + "learning_rate": 1.1998409211333383e-08, + "loss": 0.4566, + "step": 23287 + }, + { + "epoch": 0.9609639349674012, + "grad_norm": 2.410389203228389, + "learning_rate": 1.1973116507872251e-08, + "loss": 0.525, + "step": 23288 + }, + { + "epoch": 0.9610051993067591, + "grad_norm": 2.8538294428015285, + "learning_rate": 1.1947850384293912e-08, + "loss": 0.4595, + "step": 23289 + }, + { + "epoch": 0.961046463646117, + "grad_norm": 4.0967936964073655, + "learning_rate": 1.1922610841049508e-08, + "loss": 0.4784, + "step": 23290 + }, + { + "epoch": 0.9610877279854749, + "grad_norm": 3.5494117192218, + "learning_rate": 1.1897397878589844e-08, + "loss": 0.5747, + "step": 23291 + }, + { + "epoch": 0.9611289923248328, + "grad_norm": 5.030490564729208, + "learning_rate": 1.1872211497365226e-08, + "loss": 0.4819, + "step": 23292 + }, + { + "epoch": 0.9611702566641908, + "grad_norm": 2.833930140056212, + "learning_rate": 1.1847051697825796e-08, + "loss": 0.4577, + "step": 23293 + }, + { + "epoch": 0.9612115210035488, + "grad_norm": 5.6944274368134575, + "learning_rate": 1.182191848042069e-08, + "loss": 0.4882, + "step": 23294 + }, + { + "epoch": 0.9612527853429067, + "grad_norm": 2.0787818186675158, + "learning_rate": 1.1796811845599053e-08, + "loss": 0.5371, + "step": 23295 + }, + { + "epoch": 0.9612940496822646, + "grad_norm": 3.1357729794671774, + "learning_rate": 1.1771731793809193e-08, + "loss": 0.492, + "step": 23296 + }, + { + "epoch": 0.9613353140216225, + "grad_norm": 3.8325784302962367, + "learning_rate": 1.1746678325499417e-08, + "loss": 0.5251, + "step": 23297 + }, + { + "epoch": 0.9613765783609804, + "grad_norm": 2.568821529259044, + "learning_rate": 1.1721651441116699e-08, + "loss": 0.446, + "step": 23298 + }, + { + "epoch": 0.9614178427003384, + "grad_norm": 5.826924912215076, + "learning_rate": 1.1696651141108516e-08, + "loss": 0.4747, + "step": 23299 + }, + { + "epoch": 0.9614591070396963, + "grad_norm": 2.443183378023793, + "learning_rate": 1.1671677425921178e-08, + "loss": 0.4977, + "step": 23300 + }, + { + "epoch": 0.9615003713790542, + "grad_norm": 3.623203957544378, + "learning_rate": 1.1646730296001162e-08, + "loss": 0.4667, + "step": 23301 + }, + { + "epoch": 0.9615416357184121, + "grad_norm": 2.4199821810361724, + "learning_rate": 1.162180975179361e-08, + "loss": 0.4788, + "step": 23302 + }, + { + "epoch": 0.96158290005777, + "grad_norm": 13.649516382885336, + "learning_rate": 
1.1596915793744e-08, + "loss": 0.503, + "step": 23303 + }, + { + "epoch": 0.9616241643971281, + "grad_norm": 2.1276438655903918, + "learning_rate": 1.157204842229681e-08, + "loss": 0.522, + "step": 23304 + }, + { + "epoch": 0.961665428736486, + "grad_norm": 4.697014000815438, + "learning_rate": 1.154720763789635e-08, + "loss": 0.5432, + "step": 23305 + }, + { + "epoch": 0.9617066930758439, + "grad_norm": 2.1681011171639746, + "learning_rate": 1.1522393440986267e-08, + "loss": 0.4985, + "step": 23306 + }, + { + "epoch": 0.9617479574152018, + "grad_norm": 3.61636465606932, + "learning_rate": 1.1497605832009871e-08, + "loss": 0.5112, + "step": 23307 + }, + { + "epoch": 0.9617892217545597, + "grad_norm": 3.1673000836074654, + "learning_rate": 1.1472844811409977e-08, + "loss": 0.5279, + "step": 23308 + }, + { + "epoch": 0.9618304860939176, + "grad_norm": 2.960860481129806, + "learning_rate": 1.1448110379628728e-08, + "loss": 0.5523, + "step": 23309 + }, + { + "epoch": 0.9618717504332756, + "grad_norm": 5.288312397245361, + "learning_rate": 1.1423402537108108e-08, + "loss": 0.559, + "step": 23310 + }, + { + "epoch": 0.9619130147726335, + "grad_norm": 3.790299633182456, + "learning_rate": 1.139872128428926e-08, + "loss": 0.4765, + "step": 23311 + }, + { + "epoch": 0.9619542791119914, + "grad_norm": 5.148999766920851, + "learning_rate": 1.1374066621613e-08, + "loss": 0.5192, + "step": 23312 + }, + { + "epoch": 0.9619955434513493, + "grad_norm": 3.572755670483737, + "learning_rate": 1.1349438549520142e-08, + "loss": 0.4953, + "step": 23313 + }, + { + "epoch": 0.9620368077907072, + "grad_norm": 2.3768471170912804, + "learning_rate": 1.1324837068450334e-08, + "loss": 0.5199, + "step": 23314 + }, + { + "epoch": 0.9620780721300652, + "grad_norm": 3.1329472605199213, + "learning_rate": 1.130026217884289e-08, + "loss": 0.4543, + "step": 23315 + }, + { + "epoch": 0.9621193364694232, + "grad_norm": 2.4159171190233786, + "learning_rate": 1.1275713881136963e-08, + "loss": 0.5064, + "step": 23316 + }, + { + "epoch": 0.9621606008087811, + "grad_norm": 9.471007623739522, + "learning_rate": 1.125119217577103e-08, + "loss": 0.5773, + "step": 23317 + }, + { + "epoch": 0.962201865148139, + "grad_norm": 4.757462927803177, + "learning_rate": 1.1226697063183077e-08, + "loss": 0.5416, + "step": 23318 + }, + { + "epoch": 0.9622431294874969, + "grad_norm": 2.685258773192288, + "learning_rate": 1.1202228543810589e-08, + "loss": 0.5546, + "step": 23319 + }, + { + "epoch": 0.9622843938268548, + "grad_norm": 2.6596184764987405, + "learning_rate": 1.1177786618090714e-08, + "loss": 0.5208, + "step": 23320 + }, + { + "epoch": 0.9623256581662127, + "grad_norm": 2.320733669363254, + "learning_rate": 1.1153371286460268e-08, + "loss": 0.5404, + "step": 23321 + }, + { + "epoch": 0.9623669225055707, + "grad_norm": 4.122178074763191, + "learning_rate": 1.1128982549354905e-08, + "loss": 0.4962, + "step": 23322 + }, + { + "epoch": 0.9624081868449286, + "grad_norm": 3.395451562192501, + "learning_rate": 1.1104620407210608e-08, + "loss": 0.5129, + "step": 23323 + }, + { + "epoch": 0.9624494511842865, + "grad_norm": 9.616709916950265, + "learning_rate": 1.1080284860462364e-08, + "loss": 0.5256, + "step": 23324 + }, + { + "epoch": 0.9624907155236445, + "grad_norm": 5.44017527903165, + "learning_rate": 1.1055975909544824e-08, + "loss": 0.4623, + "step": 23325 + }, + { + "epoch": 0.9625319798630024, + "grad_norm": 5.115601886400256, + "learning_rate": 1.1031693554892475e-08, + "loss": 0.5049, + "step": 23326 + }, + { + "epoch": 
0.9625732442023603, + "grad_norm": 4.5698014380990335, + "learning_rate": 1.1007437796938968e-08, + "loss": 0.5339, + "step": 23327 + }, + { + "epoch": 0.9626145085417183, + "grad_norm": 2.133531808155511, + "learning_rate": 1.0983208636117293e-08, + "loss": 0.4715, + "step": 23328 + }, + { + "epoch": 0.9626557728810762, + "grad_norm": 2.808004453602363, + "learning_rate": 1.09590060728606e-08, + "loss": 0.4823, + "step": 23329 + }, + { + "epoch": 0.9626970372204341, + "grad_norm": 4.700589920262229, + "learning_rate": 1.0934830107601046e-08, + "loss": 0.5908, + "step": 23330 + }, + { + "epoch": 0.962738301559792, + "grad_norm": 2.4083607792721202, + "learning_rate": 1.0910680740770617e-08, + "loss": 0.4964, + "step": 23331 + }, + { + "epoch": 0.9627795658991499, + "grad_norm": 2.649933772598036, + "learning_rate": 1.0886557972800305e-08, + "loss": 0.5466, + "step": 23332 + }, + { + "epoch": 0.9628208302385078, + "grad_norm": 5.4413696286478554, + "learning_rate": 1.0862461804121426e-08, + "loss": 0.4631, + "step": 23333 + }, + { + "epoch": 0.9628620945778658, + "grad_norm": 3.2131323505903704, + "learning_rate": 1.0838392235164307e-08, + "loss": 0.5142, + "step": 23334 + }, + { + "epoch": 0.9629033589172238, + "grad_norm": 6.446838217318019, + "learning_rate": 1.081434926635877e-08, + "loss": 0.5399, + "step": 23335 + }, + { + "epoch": 0.9629446232565817, + "grad_norm": 3.14998832200406, + "learning_rate": 1.0790332898134304e-08, + "loss": 0.5828, + "step": 23336 + }, + { + "epoch": 0.9629858875959396, + "grad_norm": 2.4279854314827674, + "learning_rate": 1.0766343130919732e-08, + "loss": 0.4955, + "step": 23337 + }, + { + "epoch": 0.9630271519352975, + "grad_norm": 3.4124471469417545, + "learning_rate": 1.0742379965144045e-08, + "loss": 0.5042, + "step": 23338 + }, + { + "epoch": 0.9630684162746554, + "grad_norm": 4.01693170232218, + "learning_rate": 1.07184434012349e-08, + "loss": 0.4795, + "step": 23339 + }, + { + "epoch": 0.9631096806140134, + "grad_norm": 3.0165070362951205, + "learning_rate": 1.0694533439619957e-08, + "loss": 0.478, + "step": 23340 + }, + { + "epoch": 0.9631509449533713, + "grad_norm": 8.891344160927963, + "learning_rate": 1.0670650080726208e-08, + "loss": 0.5312, + "step": 23341 + }, + { + "epoch": 0.9631922092927292, + "grad_norm": 6.909677096269176, + "learning_rate": 1.0646793324980475e-08, + "loss": 0.4694, + "step": 23342 + }, + { + "epoch": 0.9632334736320871, + "grad_norm": 2.5285182130368193, + "learning_rate": 1.0622963172808919e-08, + "loss": 0.5473, + "step": 23343 + }, + { + "epoch": 0.963274737971445, + "grad_norm": 3.34343955248815, + "learning_rate": 1.0599159624636867e-08, + "loss": 0.5577, + "step": 23344 + }, + { + "epoch": 0.963316002310803, + "grad_norm": 2.750750419536273, + "learning_rate": 1.0575382680889812e-08, + "loss": 0.4751, + "step": 23345 + }, + { + "epoch": 0.963357266650161, + "grad_norm": 11.64873053394673, + "learning_rate": 1.055163234199258e-08, + "loss": 0.5046, + "step": 23346 + }, + { + "epoch": 0.9633985309895189, + "grad_norm": 3.2478161688011666, + "learning_rate": 1.0527908608369163e-08, + "loss": 0.4931, + "step": 23347 + }, + { + "epoch": 0.9634397953288768, + "grad_norm": 2.656938238918931, + "learning_rate": 1.0504211480443226e-08, + "loss": 0.5201, + "step": 23348 + }, + { + "epoch": 0.9634810596682347, + "grad_norm": 7.011663240912893, + "learning_rate": 1.0480540958638263e-08, + "loss": 0.5856, + "step": 23349 + }, + { + "epoch": 0.9635223240075926, + "grad_norm": 2.5920606877764465, + "learning_rate": 
1.04568970433771e-08, + "loss": 0.4788, + "step": 23350 + }, + { + "epoch": 0.9635635883469506, + "grad_norm": 2.5519101559347837, + "learning_rate": 1.0433279735082068e-08, + "loss": 0.5015, + "step": 23351 + }, + { + "epoch": 0.9636048526863085, + "grad_norm": 2.6258040897437764, + "learning_rate": 1.040968903417483e-08, + "loss": 0.4944, + "step": 23352 + }, + { + "epoch": 0.9636461170256664, + "grad_norm": 3.692916593719568, + "learning_rate": 1.0386124941077046e-08, + "loss": 0.4744, + "step": 23353 + }, + { + "epoch": 0.9636873813650243, + "grad_norm": 10.428613731684635, + "learning_rate": 1.0362587456209382e-08, + "loss": 0.4891, + "step": 23354 + }, + { + "epoch": 0.9637286457043823, + "grad_norm": 2.9628783617826637, + "learning_rate": 1.0339076579992669e-08, + "loss": 0.4664, + "step": 23355 + }, + { + "epoch": 0.9637699100437402, + "grad_norm": 2.771483647544888, + "learning_rate": 1.03155923128464e-08, + "loss": 0.5377, + "step": 23356 + }, + { + "epoch": 0.9638111743830982, + "grad_norm": 2.9898975741506297, + "learning_rate": 1.0292134655190244e-08, + "loss": 0.5105, + "step": 23357 + }, + { + "epoch": 0.9638524387224561, + "grad_norm": 11.16640027176327, + "learning_rate": 1.0268703607443364e-08, + "loss": 0.5182, + "step": 23358 + }, + { + "epoch": 0.963893703061814, + "grad_norm": 4.354531508278474, + "learning_rate": 1.0245299170024092e-08, + "loss": 0.4941, + "step": 23359 + }, + { + "epoch": 0.9639349674011719, + "grad_norm": 2.757952975367621, + "learning_rate": 1.022192134335076e-08, + "loss": 0.5788, + "step": 23360 + }, + { + "epoch": 0.9639762317405298, + "grad_norm": 2.8041432686735663, + "learning_rate": 1.0198570127840535e-08, + "loss": 0.4993, + "step": 23361 + }, + { + "epoch": 0.9640174960798877, + "grad_norm": 3.670943955438375, + "learning_rate": 1.0175245523910914e-08, + "loss": 0.4902, + "step": 23362 + }, + { + "epoch": 0.9640587604192457, + "grad_norm": 5.470044455764382, + "learning_rate": 1.0151947531978234e-08, + "loss": 0.518, + "step": 23363 + }, + { + "epoch": 0.9641000247586036, + "grad_norm": 3.6678394667454746, + "learning_rate": 1.0128676152458827e-08, + "loss": 0.5264, + "step": 23364 + }, + { + "epoch": 0.9641412890979616, + "grad_norm": 2.3483104092475546, + "learning_rate": 1.0105431385768527e-08, + "loss": 0.4755, + "step": 23365 + }, + { + "epoch": 0.9641825534373195, + "grad_norm": 16.017530955882574, + "learning_rate": 1.0082213232322167e-08, + "loss": 0.5509, + "step": 23366 + }, + { + "epoch": 0.9642238177766774, + "grad_norm": 4.399110232899899, + "learning_rate": 1.0059021692534754e-08, + "loss": 0.5051, + "step": 23367 + }, + { + "epoch": 0.9642650821160353, + "grad_norm": 2.4430802723794742, + "learning_rate": 1.0035856766820451e-08, + "loss": 0.5123, + "step": 23368 + }, + { + "epoch": 0.9643063464553933, + "grad_norm": 3.8632760991815798, + "learning_rate": 1.0012718455593095e-08, + "loss": 0.5382, + "step": 23369 + }, + { + "epoch": 0.9643476107947512, + "grad_norm": 4.1046408340424, + "learning_rate": 9.989606759265857e-09, + "loss": 0.4649, + "step": 23370 + }, + { + "epoch": 0.9643888751341091, + "grad_norm": 2.8048434465646794, + "learning_rate": 9.966521678251905e-09, + "loss": 0.5631, + "step": 23371 + }, + { + "epoch": 0.964430139473467, + "grad_norm": 3.6794046696032097, + "learning_rate": 9.943463212963244e-09, + "loss": 0.5203, + "step": 23372 + }, + { + "epoch": 0.9644714038128249, + "grad_norm": 18.248465212399818, + "learning_rate": 9.920431363811878e-09, + "loss": 0.4907, + "step": 23373 + }, + { + "epoch": 
0.9645126681521828, + "grad_norm": 3.6100931288786486, + "learning_rate": 9.897426131209309e-09, + "loss": 0.5238, + "step": 23374 + }, + { + "epoch": 0.9645539324915408, + "grad_norm": 3.366260133438957, + "learning_rate": 9.874447515566209e-09, + "loss": 0.5288, + "step": 23375 + }, + { + "epoch": 0.9645951968308988, + "grad_norm": 21.805526389745467, + "learning_rate": 9.851495517293252e-09, + "loss": 0.4427, + "step": 23376 + }, + { + "epoch": 0.9646364611702567, + "grad_norm": 2.3886505218675658, + "learning_rate": 9.828570136800442e-09, + "loss": 0.5002, + "step": 23377 + }, + { + "epoch": 0.9646777255096146, + "grad_norm": 3.359421434252614, + "learning_rate": 9.805671374497117e-09, + "loss": 0.4682, + "step": 23378 + }, + { + "epoch": 0.9647189898489725, + "grad_norm": 4.390786100650023, + "learning_rate": 9.78279923079245e-09, + "loss": 0.533, + "step": 23379 + }, + { + "epoch": 0.9647602541883304, + "grad_norm": 5.207415144905475, + "learning_rate": 9.759953706094949e-09, + "loss": 0.5185, + "step": 23380 + }, + { + "epoch": 0.9648015185276884, + "grad_norm": 3.2908299098245792, + "learning_rate": 9.737134800812619e-09, + "loss": 0.5293, + "step": 23381 + }, + { + "epoch": 0.9648427828670463, + "grad_norm": 8.817768494218145, + "learning_rate": 9.714342515353136e-09, + "loss": 0.5565, + "step": 23382 + }, + { + "epoch": 0.9648840472064042, + "grad_norm": 3.5030415171547045, + "learning_rate": 9.691576850123673e-09, + "loss": 0.4828, + "step": 23383 + }, + { + "epoch": 0.9649253115457621, + "grad_norm": 10.56251141579121, + "learning_rate": 9.66883780553074e-09, + "loss": 0.5391, + "step": 23384 + }, + { + "epoch": 0.96496657588512, + "grad_norm": 3.884439057511571, + "learning_rate": 9.646125381980675e-09, + "loss": 0.461, + "step": 23385 + }, + { + "epoch": 0.965007840224478, + "grad_norm": 2.301419258094744, + "learning_rate": 9.623439579879157e-09, + "loss": 0.5105, + "step": 23386 + }, + { + "epoch": 0.965049104563836, + "grad_norm": 3.422282231771888, + "learning_rate": 9.60078039963136e-09, + "loss": 0.5147, + "step": 23387 + }, + { + "epoch": 0.9650903689031939, + "grad_norm": 3.31303943935893, + "learning_rate": 9.57814784164196e-09, + "loss": 0.5279, + "step": 23388 + }, + { + "epoch": 0.9651316332425518, + "grad_norm": 5.239382768926711, + "learning_rate": 9.555541906315302e-09, + "loss": 0.5085, + "step": 23389 + }, + { + "epoch": 0.9651728975819097, + "grad_norm": 2.7859948373495547, + "learning_rate": 9.532962594055228e-09, + "loss": 0.4945, + "step": 23390 + }, + { + "epoch": 0.9652141619212676, + "grad_norm": 2.3259956550421528, + "learning_rate": 9.510409905264916e-09, + "loss": 0.4509, + "step": 23391 + }, + { + "epoch": 0.9652554262606255, + "grad_norm": 2.5580824878707538, + "learning_rate": 9.487883840347378e-09, + "loss": 0.5451, + "step": 23392 + }, + { + "epoch": 0.9652966905999835, + "grad_norm": 3.9958597475741238, + "learning_rate": 9.465384399704957e-09, + "loss": 0.5384, + "step": 23393 + }, + { + "epoch": 0.9653379549393414, + "grad_norm": 6.40316612336711, + "learning_rate": 9.442911583739499e-09, + "loss": 0.47, + "step": 23394 + }, + { + "epoch": 0.9653792192786993, + "grad_norm": 2.5952877941091628, + "learning_rate": 9.420465392852518e-09, + "loss": 0.5402, + "step": 23395 + }, + { + "epoch": 0.9654204836180573, + "grad_norm": 2.76322197986081, + "learning_rate": 9.398045827444857e-09, + "loss": 0.4948, + "step": 23396 + }, + { + "epoch": 0.9654617479574152, + "grad_norm": 4.092706555659885, + "learning_rate": 9.375652887916864e-09, + "loss": 
0.4616, + "step": 23397 + }, + { + "epoch": 0.9655030122967732, + "grad_norm": 2.0098861051266996, + "learning_rate": 9.353286574668719e-09, + "loss": 0.4643, + "step": 23398 + }, + { + "epoch": 0.9655442766361311, + "grad_norm": 3.383309701699536, + "learning_rate": 9.330946888100101e-09, + "loss": 0.5133, + "step": 23399 + }, + { + "epoch": 0.965585540975489, + "grad_norm": 2.243204264670759, + "learning_rate": 9.308633828609692e-09, + "loss": 0.5109, + "step": 23400 + }, + { + "epoch": 0.9656268053148469, + "grad_norm": 2.607849067720529, + "learning_rate": 9.28634739659634e-09, + "loss": 0.5209, + "step": 23401 + }, + { + "epoch": 0.9656680696542048, + "grad_norm": 4.981416504359311, + "learning_rate": 9.264087592457892e-09, + "loss": 0.4889, + "step": 23402 + }, + { + "epoch": 0.9657093339935627, + "grad_norm": 3.4972256021458583, + "learning_rate": 9.241854416592033e-09, + "loss": 0.5131, + "step": 23403 + }, + { + "epoch": 0.9657505983329207, + "grad_norm": 3.628053856385338, + "learning_rate": 9.219647869396108e-09, + "loss": 0.4665, + "step": 23404 + }, + { + "epoch": 0.9657918626722786, + "grad_norm": 10.58800278688196, + "learning_rate": 9.197467951266636e-09, + "loss": 0.5084, + "step": 23405 + }, + { + "epoch": 0.9658331270116366, + "grad_norm": 3.1304872942005084, + "learning_rate": 9.175314662599631e-09, + "loss": 0.4959, + "step": 23406 + }, + { + "epoch": 0.9658743913509945, + "grad_norm": 18.568996764752576, + "learning_rate": 9.153188003791112e-09, + "loss": 0.5621, + "step": 23407 + }, + { + "epoch": 0.9659156556903524, + "grad_norm": 3.5551154856971117, + "learning_rate": 9.131087975236263e-09, + "loss": 0.4725, + "step": 23408 + }, + { + "epoch": 0.9659569200297103, + "grad_norm": 5.224170191000107, + "learning_rate": 9.1090145773296e-09, + "loss": 0.5787, + "step": 23409 + }, + { + "epoch": 0.9659981843690683, + "grad_norm": 3.7599222607294847, + "learning_rate": 9.08696781046564e-09, + "loss": 0.5283, + "step": 23410 + }, + { + "epoch": 0.9660394487084262, + "grad_norm": 2.1836469348654166, + "learning_rate": 9.064947675038072e-09, + "loss": 0.4715, + "step": 23411 + }, + { + "epoch": 0.9660807130477841, + "grad_norm": 3.1237984926782847, + "learning_rate": 9.04295417144041e-09, + "loss": 0.5243, + "step": 23412 + }, + { + "epoch": 0.966121977387142, + "grad_norm": 6.361581114819381, + "learning_rate": 9.020987300065342e-09, + "loss": 0.5276, + "step": 23413 + }, + { + "epoch": 0.9661632417264999, + "grad_norm": 11.623808206415942, + "learning_rate": 8.99904706130522e-09, + "loss": 0.4786, + "step": 23414 + }, + { + "epoch": 0.9662045060658578, + "grad_norm": 3.872182430049084, + "learning_rate": 8.977133455552066e-09, + "loss": 0.5037, + "step": 23415 + }, + { + "epoch": 0.9662457704052159, + "grad_norm": 2.243691951954032, + "learning_rate": 8.9552464831974e-09, + "loss": 0.473, + "step": 23416 + }, + { + "epoch": 0.9662870347445738, + "grad_norm": 2.636705810256937, + "learning_rate": 8.933386144631905e-09, + "loss": 0.5147, + "step": 23417 + }, + { + "epoch": 0.9663282990839317, + "grad_norm": 2.689143757444847, + "learning_rate": 8.911552440246273e-09, + "loss": 0.5237, + "step": 23418 + }, + { + "epoch": 0.9663695634232896, + "grad_norm": 2.895991531646238, + "learning_rate": 8.889745370430358e-09, + "loss": 0.5511, + "step": 23419 + }, + { + "epoch": 0.9664108277626475, + "grad_norm": 2.3471647780549403, + "learning_rate": 8.867964935573846e-09, + "loss": 0.5332, + "step": 23420 + }, + { + "epoch": 0.9664520921020054, + "grad_norm": 5.854305787157324, + 
"learning_rate": 8.846211136065763e-09, + "loss": 0.5154, + "step": 23421 + }, + { + "epoch": 0.9664933564413634, + "grad_norm": 2.246901759463048, + "learning_rate": 8.824483972294628e-09, + "loss": 0.5104, + "step": 23422 + }, + { + "epoch": 0.9665346207807213, + "grad_norm": 3.231590251568746, + "learning_rate": 8.802783444648466e-09, + "loss": 0.5271, + "step": 23423 + }, + { + "epoch": 0.9665758851200792, + "grad_norm": 2.5869408391406448, + "learning_rate": 8.781109553515132e-09, + "loss": 0.5583, + "step": 23424 + }, + { + "epoch": 0.9666171494594371, + "grad_norm": 8.262031696263213, + "learning_rate": 8.759462299281651e-09, + "loss": 0.4914, + "step": 23425 + }, + { + "epoch": 0.9666584137987951, + "grad_norm": 4.305753628180835, + "learning_rate": 8.737841682334547e-09, + "loss": 0.4953, + "step": 23426 + }, + { + "epoch": 0.966699678138153, + "grad_norm": 10.397372370521799, + "learning_rate": 8.716247703060176e-09, + "loss": 0.5038, + "step": 23427 + }, + { + "epoch": 0.966740942477511, + "grad_norm": 3.5199425664838775, + "learning_rate": 8.694680361844398e-09, + "loss": 0.5161, + "step": 23428 + }, + { + "epoch": 0.9667822068168689, + "grad_norm": 2.81272211110294, + "learning_rate": 8.673139659072237e-09, + "loss": 0.4861, + "step": 23429 + }, + { + "epoch": 0.9668234711562268, + "grad_norm": 3.264234621919759, + "learning_rate": 8.651625595128388e-09, + "loss": 0.492, + "step": 23430 + }, + { + "epoch": 0.9668647354955847, + "grad_norm": 3.4739737420554966, + "learning_rate": 8.630138170397373e-09, + "loss": 0.4945, + "step": 23431 + }, + { + "epoch": 0.9669059998349426, + "grad_norm": 3.455896913110943, + "learning_rate": 8.608677385262887e-09, + "loss": 0.4965, + "step": 23432 + }, + { + "epoch": 0.9669472641743005, + "grad_norm": 3.182730263737123, + "learning_rate": 8.587243240108455e-09, + "loss": 0.5111, + "step": 23433 + }, + { + "epoch": 0.9669885285136585, + "grad_norm": 3.220619776455829, + "learning_rate": 8.565835735316773e-09, + "loss": 0.5086, + "step": 23434 + }, + { + "epoch": 0.9670297928530164, + "grad_norm": 1.7977835508725672, + "learning_rate": 8.544454871270035e-09, + "loss": 0.5376, + "step": 23435 + }, + { + "epoch": 0.9670710571923743, + "grad_norm": 2.336547022279386, + "learning_rate": 8.523100648350601e-09, + "loss": 0.5411, + "step": 23436 + }, + { + "epoch": 0.9671123215317323, + "grad_norm": 2.0834980350384718, + "learning_rate": 8.501773066939667e-09, + "loss": 0.5292, + "step": 23437 + }, + { + "epoch": 0.9671535858710902, + "grad_norm": 4.650125408683352, + "learning_rate": 8.48047212741826e-09, + "loss": 0.4945, + "step": 23438 + }, + { + "epoch": 0.9671948502104482, + "grad_norm": 3.1915469230271114, + "learning_rate": 8.459197830166743e-09, + "loss": 0.5389, + "step": 23439 + }, + { + "epoch": 0.9672361145498061, + "grad_norm": 2.3287636196904846, + "learning_rate": 8.437950175565146e-09, + "loss": 0.4569, + "step": 23440 + }, + { + "epoch": 0.967277378889164, + "grad_norm": 3.5729447317641365, + "learning_rate": 8.41672916399333e-09, + "loss": 0.5015, + "step": 23441 + }, + { + "epoch": 0.9673186432285219, + "grad_norm": 2.472920719958629, + "learning_rate": 8.395534795829829e-09, + "loss": 0.4802, + "step": 23442 + }, + { + "epoch": 0.9673599075678798, + "grad_norm": 2.51088480509239, + "learning_rate": 8.37436707145367e-09, + "loss": 0.5282, + "step": 23443 + }, + { + "epoch": 0.9674011719072377, + "grad_norm": 5.491453548975247, + "learning_rate": 8.35322599124272e-09, + "loss": 0.573, + "step": 23444 + }, + { + "epoch": 
0.9674424362465956, + "grad_norm": 13.76259550715783, + "learning_rate": 8.332111555574673e-09, + "loss": 0.4935, + "step": 23445 + }, + { + "epoch": 0.9674837005859536, + "grad_norm": 11.421360095315048, + "learning_rate": 8.31102376482673e-09, + "loss": 0.5414, + "step": 23446 + }, + { + "epoch": 0.9675249649253116, + "grad_norm": 2.7243594391067485, + "learning_rate": 8.28996261937559e-09, + "loss": 0.5483, + "step": 23447 + }, + { + "epoch": 0.9675662292646695, + "grad_norm": 6.1238096004968945, + "learning_rate": 8.268928119597285e-09, + "loss": 0.4573, + "step": 23448 + }, + { + "epoch": 0.9676074936040274, + "grad_norm": 4.417214824688722, + "learning_rate": 8.24792026586768e-09, + "loss": 0.5408, + "step": 23449 + }, + { + "epoch": 0.9676487579433853, + "grad_norm": 11.59940455828151, + "learning_rate": 8.226939058562144e-09, + "loss": 0.5327, + "step": 23450 + }, + { + "epoch": 0.9676900222827433, + "grad_norm": 2.5048086812679697, + "learning_rate": 8.205984498055208e-09, + "loss": 0.5044, + "step": 23451 + }, + { + "epoch": 0.9677312866221012, + "grad_norm": 5.15593818518428, + "learning_rate": 8.18505658472124e-09, + "loss": 0.527, + "step": 23452 + }, + { + "epoch": 0.9677725509614591, + "grad_norm": 3.3382173259294126, + "learning_rate": 8.164155318934274e-09, + "loss": 0.5172, + "step": 23453 + }, + { + "epoch": 0.967813815300817, + "grad_norm": 3.1886657950966897, + "learning_rate": 8.143280701067347e-09, + "loss": 0.4528, + "step": 23454 + }, + { + "epoch": 0.9678550796401749, + "grad_norm": 3.5048424452263838, + "learning_rate": 8.12243273149349e-09, + "loss": 0.4492, + "step": 23455 + }, + { + "epoch": 0.9678963439795328, + "grad_norm": 2.0676629334465515, + "learning_rate": 8.101611410585075e-09, + "loss": 0.4854, + "step": 23456 + }, + { + "epoch": 0.9679376083188909, + "grad_norm": 3.095089497951602, + "learning_rate": 8.080816738714136e-09, + "loss": 0.4906, + "step": 23457 + }, + { + "epoch": 0.9679788726582488, + "grad_norm": 2.2101322520268303, + "learning_rate": 8.060048716252044e-09, + "loss": 0.5156, + "step": 23458 + }, + { + "epoch": 0.9680201369976067, + "grad_norm": 5.8378018774479825, + "learning_rate": 8.039307343569668e-09, + "loss": 0.5161, + "step": 23459 + }, + { + "epoch": 0.9680614013369646, + "grad_norm": 4.9060141779015565, + "learning_rate": 8.018592621037545e-09, + "loss": 0.5073, + "step": 23460 + }, + { + "epoch": 0.9681026656763225, + "grad_norm": 3.7252810232921933, + "learning_rate": 7.99790454902588e-09, + "loss": 0.5325, + "step": 23461 + }, + { + "epoch": 0.9681439300156804, + "grad_norm": 3.556466814588755, + "learning_rate": 7.977243127903876e-09, + "loss": 0.4639, + "step": 23462 + }, + { + "epoch": 0.9681851943550384, + "grad_norm": 2.5254859531138574, + "learning_rate": 7.95660835804074e-09, + "loss": 0.5295, + "step": 23463 + }, + { + "epoch": 0.9682264586943963, + "grad_norm": 2.5964757625991304, + "learning_rate": 7.936000239805174e-09, + "loss": 0.5273, + "step": 23464 + }, + { + "epoch": 0.9682677230337542, + "grad_norm": 7.1402689224899465, + "learning_rate": 7.915418773565387e-09, + "loss": 0.5275, + "step": 23465 + }, + { + "epoch": 0.9683089873731121, + "grad_norm": 2.1606128068494885, + "learning_rate": 7.894863959688748e-09, + "loss": 0.5129, + "step": 23466 + }, + { + "epoch": 0.9683502517124701, + "grad_norm": 3.4281086040454603, + "learning_rate": 7.874335798542464e-09, + "loss": 0.5518, + "step": 23467 + }, + { + "epoch": 0.968391516051828, + "grad_norm": 4.450777236558669, + "learning_rate": 7.853834290493245e-09, + 
"loss": 0.5244, + "step": 23468 + }, + { + "epoch": 0.968432780391186, + "grad_norm": 2.193038446687481, + "learning_rate": 7.833359435907295e-09, + "loss": 0.5252, + "step": 23469 + }, + { + "epoch": 0.9684740447305439, + "grad_norm": 2.4065434191264448, + "learning_rate": 7.812911235150488e-09, + "loss": 0.4934, + "step": 23470 + }, + { + "epoch": 0.9685153090699018, + "grad_norm": 5.773085107004684, + "learning_rate": 7.792489688587867e-09, + "loss": 0.4943, + "step": 23471 + }, + { + "epoch": 0.9685565734092597, + "grad_norm": 2.3046716653470383, + "learning_rate": 7.772094796584305e-09, + "loss": 0.4503, + "step": 23472 + }, + { + "epoch": 0.9685978377486176, + "grad_norm": 4.087082236078484, + "learning_rate": 7.751726559504347e-09, + "loss": 0.4801, + "step": 23473 + }, + { + "epoch": 0.9686391020879755, + "grad_norm": 45.689169386016054, + "learning_rate": 7.731384977711364e-09, + "loss": 0.5047, + "step": 23474 + }, + { + "epoch": 0.9686803664273335, + "grad_norm": 2.2981594310965865, + "learning_rate": 7.711070051568902e-09, + "loss": 0.5113, + "step": 23475 + }, + { + "epoch": 0.9687216307666914, + "grad_norm": 16.065006673315597, + "learning_rate": 7.690781781439838e-09, + "loss": 0.4944, + "step": 23476 + }, + { + "epoch": 0.9687628951060494, + "grad_norm": 2.2156631001173643, + "learning_rate": 7.670520167686878e-09, + "loss": 0.4978, + "step": 23477 + }, + { + "epoch": 0.9688041594454073, + "grad_norm": 3.096903497938148, + "learning_rate": 7.650285210671403e-09, + "loss": 0.5462, + "step": 23478 + }, + { + "epoch": 0.9688454237847652, + "grad_norm": 2.634332706123309, + "learning_rate": 7.630076910755291e-09, + "loss": 0.4954, + "step": 23479 + }, + { + "epoch": 0.9688866881241232, + "grad_norm": 2.55280506351059, + "learning_rate": 7.609895268299084e-09, + "loss": 0.4811, + "step": 23480 + }, + { + "epoch": 0.9689279524634811, + "grad_norm": 4.3199938510798015, + "learning_rate": 7.589740283663827e-09, + "loss": 0.5264, + "step": 23481 + }, + { + "epoch": 0.968969216802839, + "grad_norm": 2.0248150070923674, + "learning_rate": 7.569611957209066e-09, + "loss": 0.5015, + "step": 23482 + }, + { + "epoch": 0.9690104811421969, + "grad_norm": 3.4086395435400765, + "learning_rate": 7.549510289294515e-09, + "loss": 0.5355, + "step": 23483 + }, + { + "epoch": 0.9690517454815548, + "grad_norm": 3.4622169559596467, + "learning_rate": 7.529435280279384e-09, + "loss": 0.5285, + "step": 23484 + }, + { + "epoch": 0.9690930098209127, + "grad_norm": 4.254504320367667, + "learning_rate": 7.509386930522055e-09, + "loss": 0.5587, + "step": 23485 + }, + { + "epoch": 0.9691342741602706, + "grad_norm": 2.1195033144449984, + "learning_rate": 7.489365240380741e-09, + "loss": 0.5104, + "step": 23486 + }, + { + "epoch": 0.9691755384996287, + "grad_norm": 3.464025969895417, + "learning_rate": 7.469370210212988e-09, + "loss": 0.5024, + "step": 23487 + }, + { + "epoch": 0.9692168028389866, + "grad_norm": 9.60572342042942, + "learning_rate": 7.4494018403760134e-09, + "loss": 0.5379, + "step": 23488 + }, + { + "epoch": 0.9692580671783445, + "grad_norm": 2.2082539238746755, + "learning_rate": 7.429460131226529e-09, + "loss": 0.4664, + "step": 23489 + }, + { + "epoch": 0.9692993315177024, + "grad_norm": 3.0881542139117757, + "learning_rate": 7.409545083120917e-09, + "loss": 0.4734, + "step": 23490 + }, + { + "epoch": 0.9693405958570603, + "grad_norm": 2.6772779582685775, + "learning_rate": 7.389656696414393e-09, + "loss": 0.5228, + "step": 23491 + }, + { + "epoch": 0.9693818601964183, + "grad_norm": 
2.526670622399643, + "learning_rate": 7.36979497146284e-09, + "loss": 0.5422, + "step": 23492 + }, + { + "epoch": 0.9694231245357762, + "grad_norm": 2.181422933814148, + "learning_rate": 7.349959908620641e-09, + "loss": 0.4594, + "step": 23493 + }, + { + "epoch": 0.9694643888751341, + "grad_norm": 5.319417607705538, + "learning_rate": 7.330151508242178e-09, + "loss": 0.5208, + "step": 23494 + }, + { + "epoch": 0.969505653214492, + "grad_norm": 6.611636727955783, + "learning_rate": 7.310369770681169e-09, + "loss": 0.5748, + "step": 23495 + }, + { + "epoch": 0.9695469175538499, + "grad_norm": 2.9817084812275096, + "learning_rate": 7.290614696291165e-09, + "loss": 0.5096, + "step": 23496 + }, + { + "epoch": 0.9695881818932078, + "grad_norm": 5.265346220038034, + "learning_rate": 7.270886285425049e-09, + "loss": 0.5149, + "step": 23497 + }, + { + "epoch": 0.9696294462325659, + "grad_norm": 2.7665796369952216, + "learning_rate": 7.251184538435207e-09, + "loss": 0.529, + "step": 23498 + }, + { + "epoch": 0.9696707105719238, + "grad_norm": 2.2385768518688973, + "learning_rate": 7.231509455673358e-09, + "loss": 0.5184, + "step": 23499 + }, + { + "epoch": 0.9697119749112817, + "grad_norm": 2.8790494774251902, + "learning_rate": 7.211861037491052e-09, + "loss": 0.5211, + "step": 23500 + }, + { + "epoch": 0.9697532392506396, + "grad_norm": 3.893323451512667, + "learning_rate": 7.192239284239343e-09, + "loss": 0.4939, + "step": 23501 + }, + { + "epoch": 0.9697945035899975, + "grad_norm": 3.115401708343073, + "learning_rate": 7.172644196268785e-09, + "loss": 0.5018, + "step": 23502 + }, + { + "epoch": 0.9698357679293554, + "grad_norm": 3.10279767872982, + "learning_rate": 7.153075773929097e-09, + "loss": 0.4877, + "step": 23503 + }, + { + "epoch": 0.9698770322687134, + "grad_norm": 7.015170047638265, + "learning_rate": 7.133534017569998e-09, + "loss": 0.4774, + "step": 23504 + }, + { + "epoch": 0.9699182966080713, + "grad_norm": 2.548729131335762, + "learning_rate": 7.114018927540711e-09, + "loss": 0.4658, + "step": 23505 + }, + { + "epoch": 0.9699595609474292, + "grad_norm": 3.0298294501322727, + "learning_rate": 7.094530504189623e-09, + "loss": 0.4604, + "step": 23506 + }, + { + "epoch": 0.9700008252867871, + "grad_norm": 3.796594407844393, + "learning_rate": 7.075068747864954e-09, + "loss": 0.5286, + "step": 23507 + }, + { + "epoch": 0.9700420896261451, + "grad_norm": 2.3828681174282718, + "learning_rate": 7.055633658914262e-09, + "loss": 0.5083, + "step": 23508 + }, + { + "epoch": 0.970083353965503, + "grad_norm": 6.931344616689339, + "learning_rate": 7.036225237684602e-09, + "loss": 0.571, + "step": 23509 + }, + { + "epoch": 0.970124618304861, + "grad_norm": 2.6941693898046783, + "learning_rate": 7.016843484522862e-09, + "loss": 0.5391, + "step": 23510 + }, + { + "epoch": 0.9701658826442189, + "grad_norm": 9.90532004511548, + "learning_rate": 6.997488399775265e-09, + "loss": 0.4765, + "step": 23511 + }, + { + "epoch": 0.9702071469835768, + "grad_norm": 2.0772003697099226, + "learning_rate": 6.9781599837873686e-09, + "loss": 0.615, + "step": 23512 + }, + { + "epoch": 0.9702484113229347, + "grad_norm": 2.126240242900338, + "learning_rate": 6.9588582369043974e-09, + "loss": 0.5345, + "step": 23513 + }, + { + "epoch": 0.9702896756622926, + "grad_norm": 3.0075547940027176, + "learning_rate": 6.939583159471407e-09, + "loss": 0.4963, + "step": 23514 + }, + { + "epoch": 0.9703309400016505, + "grad_norm": 2.7068634660693767, + "learning_rate": 6.9203347518324574e-09, + "loss": 0.4866, + "step": 23515 + }, 
+ { + "epoch": 0.9703722043410085, + "grad_norm": 1.802550979104897, + "learning_rate": 6.901113014331439e-09, + "loss": 0.4797, + "step": 23516 + }, + { + "epoch": 0.9704134686803664, + "grad_norm": 3.811763526084804, + "learning_rate": 6.881917947311578e-09, + "loss": 0.4867, + "step": 23517 + }, + { + "epoch": 0.9704547330197244, + "grad_norm": 3.5193776053562145, + "learning_rate": 6.862749551116099e-09, + "loss": 0.5038, + "step": 23518 + }, + { + "epoch": 0.9704959973590823, + "grad_norm": 6.10070474154859, + "learning_rate": 6.843607826087062e-09, + "loss": 0.553, + "step": 23519 + }, + { + "epoch": 0.9705372616984402, + "grad_norm": 3.0366652842135142, + "learning_rate": 6.824492772566526e-09, + "loss": 0.4871, + "step": 23520 + }, + { + "epoch": 0.9705785260377982, + "grad_norm": 3.109861100387549, + "learning_rate": 6.805404390895887e-09, + "loss": 0.4461, + "step": 23521 + }, + { + "epoch": 0.9706197903771561, + "grad_norm": 4.750025695495008, + "learning_rate": 6.7863426814160355e-09, + "loss": 0.5357, + "step": 23522 + }, + { + "epoch": 0.970661054716514, + "grad_norm": 2.8369324862364342, + "learning_rate": 6.767307644467702e-09, + "loss": 0.4863, + "step": 23523 + }, + { + "epoch": 0.9707023190558719, + "grad_norm": 7.469119412907417, + "learning_rate": 6.748299280390613e-09, + "loss": 0.4772, + "step": 23524 + }, + { + "epoch": 0.9707435833952298, + "grad_norm": 3.726770696642909, + "learning_rate": 6.729317589524498e-09, + "loss": 0.4826, + "step": 23525 + }, + { + "epoch": 0.9707848477345877, + "grad_norm": 1.927077833470246, + "learning_rate": 6.710362572208417e-09, + "loss": 0.5358, + "step": 23526 + }, + { + "epoch": 0.9708261120739456, + "grad_norm": 3.0911873404751926, + "learning_rate": 6.691434228780768e-09, + "loss": 0.5052, + "step": 23527 + }, + { + "epoch": 0.9708673764133037, + "grad_norm": 9.838586413333031, + "learning_rate": 6.672532559579947e-09, + "loss": 0.4911, + "step": 23528 + }, + { + "epoch": 0.9709086407526616, + "grad_norm": 5.197438858936259, + "learning_rate": 6.65365756494335e-09, + "loss": 0.5285, + "step": 23529 + }, + { + "epoch": 0.9709499050920195, + "grad_norm": 2.6229596946815916, + "learning_rate": 6.634809245208206e-09, + "loss": 0.5005, + "step": 23530 + }, + { + "epoch": 0.9709911694313774, + "grad_norm": 2.5888407758703824, + "learning_rate": 6.6159876007112484e-09, + "loss": 0.4542, + "step": 23531 + }, + { + "epoch": 0.9710324337707353, + "grad_norm": 6.198639795906123, + "learning_rate": 6.59719263178854e-09, + "loss": 0.5046, + "step": 23532 + }, + { + "epoch": 0.9710736981100933, + "grad_norm": 2.000233689812275, + "learning_rate": 6.578424338775979e-09, + "loss": 0.5313, + "step": 23533 + }, + { + "epoch": 0.9711149624494512, + "grad_norm": 3.776249514814645, + "learning_rate": 6.559682722008631e-09, + "loss": 0.4819, + "step": 23534 + }, + { + "epoch": 0.9711562267888091, + "grad_norm": 2.568379624159746, + "learning_rate": 6.5409677818215605e-09, + "loss": 0.4943, + "step": 23535 + }, + { + "epoch": 0.971197491128167, + "grad_norm": 1.9891938484910001, + "learning_rate": 6.5222795185486685e-09, + "loss": 0.4905, + "step": 23536 + }, + { + "epoch": 0.9712387554675249, + "grad_norm": 2.243588833340942, + "learning_rate": 6.50361793252402e-09, + "loss": 0.5588, + "step": 23537 + }, + { + "epoch": 0.9712800198068829, + "grad_norm": 11.143850087172424, + "learning_rate": 6.484983024080848e-09, + "loss": 0.543, + "step": 23538 + }, + { + "epoch": 0.9713212841462409, + "grad_norm": 2.257619326029723, + "learning_rate": 
6.466374793552221e-09, + "loss": 0.462, + "step": 23539 + }, + { + "epoch": 0.9713625484855988, + "grad_norm": 7.746633594390925, + "learning_rate": 6.447793241270206e-09, + "loss": 0.5056, + "step": 23540 + }, + { + "epoch": 0.9714038128249567, + "grad_norm": 6.0444370549074, + "learning_rate": 6.429238367567036e-09, + "loss": 0.4695, + "step": 23541 + }, + { + "epoch": 0.9714450771643146, + "grad_norm": 11.216604167827962, + "learning_rate": 6.410710172773948e-09, + "loss": 0.5178, + "step": 23542 + }, + { + "epoch": 0.9714863415036725, + "grad_norm": 3.1813760097191377, + "learning_rate": 6.392208657222176e-09, + "loss": 0.5367, + "step": 23543 + }, + { + "epoch": 0.9715276058430304, + "grad_norm": 2.693670849424517, + "learning_rate": 6.37373382124179e-09, + "loss": 0.5198, + "step": 23544 + }, + { + "epoch": 0.9715688701823884, + "grad_norm": 2.506231960495865, + "learning_rate": 6.355285665163024e-09, + "loss": 0.5428, + "step": 23545 + }, + { + "epoch": 0.9716101345217463, + "grad_norm": 4.046459270420871, + "learning_rate": 6.336864189315284e-09, + "loss": 0.5173, + "step": 23546 + }, + { + "epoch": 0.9716513988611042, + "grad_norm": 2.7361521150696757, + "learning_rate": 6.31846939402797e-09, + "loss": 0.5324, + "step": 23547 + }, + { + "epoch": 0.9716926632004622, + "grad_norm": 2.8795505429915442, + "learning_rate": 6.300101279629156e-09, + "loss": 0.4389, + "step": 23548 + }, + { + "epoch": 0.9717339275398201, + "grad_norm": 24.942519167743306, + "learning_rate": 6.28175984644741e-09, + "loss": 0.4721, + "step": 23549 + }, + { + "epoch": 0.971775191879178, + "grad_norm": 25.622105850321645, + "learning_rate": 6.263445094809972e-09, + "loss": 0.535, + "step": 23550 + }, + { + "epoch": 0.971816456218536, + "grad_norm": 2.4039141568745093, + "learning_rate": 6.245157025044246e-09, + "loss": 0.499, + "step": 23551 + }, + { + "epoch": 0.9718577205578939, + "grad_norm": 2.5771950911259034, + "learning_rate": 6.226895637476804e-09, + "loss": 0.5466, + "step": 23552 + }, + { + "epoch": 0.9718989848972518, + "grad_norm": 3.0093057239430205, + "learning_rate": 6.208660932433885e-09, + "loss": 0.4682, + "step": 23553 + }, + { + "epoch": 0.9719402492366097, + "grad_norm": 4.5685460646632645, + "learning_rate": 6.1904529102410626e-09, + "loss": 0.5559, + "step": 23554 + }, + { + "epoch": 0.9719815135759676, + "grad_norm": 4.856088126355385, + "learning_rate": 6.172271571223909e-09, + "loss": 0.5184, + "step": 23555 + }, + { + "epoch": 0.9720227779153255, + "grad_norm": 3.3881985694488117, + "learning_rate": 6.1541169157068315e-09, + "loss": 0.4945, + "step": 23556 + }, + { + "epoch": 0.9720640422546835, + "grad_norm": 3.455224852932987, + "learning_rate": 6.135988944014237e-09, + "loss": 0.5148, + "step": 23557 + }, + { + "epoch": 0.9721053065940414, + "grad_norm": 4.671801371076011, + "learning_rate": 6.117887656470033e-09, + "loss": 0.4861, + "step": 23558 + }, + { + "epoch": 0.9721465709333994, + "grad_norm": 6.067337687700571, + "learning_rate": 6.099813053397296e-09, + "loss": 0.4961, + "step": 23559 + }, + { + "epoch": 0.9721878352727573, + "grad_norm": 3.1747591083510627, + "learning_rate": 6.081765135119266e-09, + "loss": 0.5194, + "step": 23560 + }, + { + "epoch": 0.9722290996121152, + "grad_norm": 3.968426764923727, + "learning_rate": 6.063743901958018e-09, + "loss": 0.5434, + "step": 23561 + }, + { + "epoch": 0.9722703639514731, + "grad_norm": 3.469719538433297, + "learning_rate": 6.04574935423563e-09, + "loss": 0.4631, + "step": 23562 + }, + { + "epoch": 0.9723116282908311, + 
"grad_norm": 2.3199734296507777, + "learning_rate": 6.027781492273343e-09, + "loss": 0.5376, + "step": 23563 + }, + { + "epoch": 0.972352892630189, + "grad_norm": 2.490056471014832, + "learning_rate": 6.009840316392401e-09, + "loss": 0.527, + "step": 23564 + }, + { + "epoch": 0.9723941569695469, + "grad_norm": 3.9561099973621063, + "learning_rate": 5.991925826912881e-09, + "loss": 0.5226, + "step": 23565 + }, + { + "epoch": 0.9724354213089048, + "grad_norm": 2.4629677236586485, + "learning_rate": 5.9740380241551926e-09, + "loss": 0.5065, + "step": 23566 + }, + { + "epoch": 0.9724766856482627, + "grad_norm": 5.37465533138017, + "learning_rate": 5.956176908438582e-09, + "loss": 0.506, + "step": 23567 + }, + { + "epoch": 0.9725179499876206, + "grad_norm": 3.143638463363811, + "learning_rate": 5.938342480082126e-09, + "loss": 0.4951, + "step": 23568 + }, + { + "epoch": 0.9725592143269787, + "grad_norm": 2.2335052855838553, + "learning_rate": 5.92053473940457e-09, + "loss": 0.5239, + "step": 23569 + }, + { + "epoch": 0.9726004786663366, + "grad_norm": 2.4673386076443173, + "learning_rate": 5.902753686723661e-09, + "loss": 0.512, + "step": 23570 + }, + { + "epoch": 0.9726417430056945, + "grad_norm": 2.6945567989308734, + "learning_rate": 5.884999322357476e-09, + "loss": 0.5794, + "step": 23571 + }, + { + "epoch": 0.9726830073450524, + "grad_norm": 4.727379984821364, + "learning_rate": 5.8672716466229295e-09, + "loss": 0.4951, + "step": 23572 + }, + { + "epoch": 0.9727242716844103, + "grad_norm": 3.0724746345413196, + "learning_rate": 5.849570659836434e-09, + "loss": 0.5609, + "step": 23573 + }, + { + "epoch": 0.9727655360237683, + "grad_norm": 2.640586011785216, + "learning_rate": 5.83189636231457e-09, + "loss": 0.4984, + "step": 23574 + }, + { + "epoch": 0.9728068003631262, + "grad_norm": 1.9296589717078334, + "learning_rate": 5.814248754372753e-09, + "loss": 0.4899, + "step": 23575 + }, + { + "epoch": 0.9728480647024841, + "grad_norm": 17.99361870785849, + "learning_rate": 5.796627836326396e-09, + "loss": 0.5174, + "step": 23576 + }, + { + "epoch": 0.972889329041842, + "grad_norm": 4.55348337414107, + "learning_rate": 5.779033608490247e-09, + "loss": 0.5303, + "step": 23577 + }, + { + "epoch": 0.9729305933811999, + "grad_norm": 2.572097712309134, + "learning_rate": 5.7614660711783895e-09, + "loss": 0.4832, + "step": 23578 + }, + { + "epoch": 0.9729718577205579, + "grad_norm": 2.2513615760755012, + "learning_rate": 5.743925224704738e-09, + "loss": 0.5346, + "step": 23579 + }, + { + "epoch": 0.9730131220599159, + "grad_norm": 19.750663498949365, + "learning_rate": 5.7264110693828756e-09, + "loss": 0.486, + "step": 23580 + }, + { + "epoch": 0.9730543863992738, + "grad_norm": 3.6399112695558427, + "learning_rate": 5.708923605525218e-09, + "loss": 0.4982, + "step": 23581 + }, + { + "epoch": 0.9730956507386317, + "grad_norm": 3.038263394008732, + "learning_rate": 5.6914628334443496e-09, + "loss": 0.473, + "step": 23582 + }, + { + "epoch": 0.9731369150779896, + "grad_norm": 7.611348411176123, + "learning_rate": 5.674028753452187e-09, + "loss": 0.5474, + "step": 23583 + }, + { + "epoch": 0.9731781794173475, + "grad_norm": 4.173382120720924, + "learning_rate": 5.656621365860148e-09, + "loss": 0.5482, + "step": 23584 + }, + { + "epoch": 0.9732194437567054, + "grad_norm": 12.49598493877004, + "learning_rate": 5.639240670978984e-09, + "loss": 0.555, + "step": 23585 + }, + { + "epoch": 0.9732607080960634, + "grad_norm": 11.159828195416958, + "learning_rate": 5.621886669119447e-09, + "loss": 0.552, + 
"step": 23586 + }, + { + "epoch": 0.9733019724354213, + "grad_norm": 5.7963739267727155, + "learning_rate": 5.604559360591288e-09, + "loss": 0.4866, + "step": 23587 + }, + { + "epoch": 0.9733432367747792, + "grad_norm": 3.4421234782576264, + "learning_rate": 5.587258745704093e-09, + "loss": 0.4927, + "step": 23588 + }, + { + "epoch": 0.9733845011141372, + "grad_norm": 2.640420077859926, + "learning_rate": 5.5699848247669494e-09, + "loss": 0.4953, + "step": 23589 + }, + { + "epoch": 0.9734257654534951, + "grad_norm": 3.552017422597207, + "learning_rate": 5.552737598088276e-09, + "loss": 0.441, + "step": 23590 + }, + { + "epoch": 0.973467029792853, + "grad_norm": 2.934631050593369, + "learning_rate": 5.535517065976326e-09, + "loss": 0.5287, + "step": 23591 + }, + { + "epoch": 0.973508294132211, + "grad_norm": 2.94039826303555, + "learning_rate": 5.518323228738686e-09, + "loss": 0.4857, + "step": 23592 + }, + { + "epoch": 0.9735495584715689, + "grad_norm": 5.95853327495641, + "learning_rate": 5.50115608668228e-09, + "loss": 0.544, + "step": 23593 + }, + { + "epoch": 0.9735908228109268, + "grad_norm": 2.534412551409488, + "learning_rate": 5.484015640114026e-09, + "loss": 0.4585, + "step": 23594 + }, + { + "epoch": 0.9736320871502847, + "grad_norm": 4.438999889230069, + "learning_rate": 5.466901889339848e-09, + "loss": 0.6119, + "step": 23595 + }, + { + "epoch": 0.9736733514896426, + "grad_norm": 10.11405161057737, + "learning_rate": 5.449814834665668e-09, + "loss": 0.5399, + "step": 23596 + }, + { + "epoch": 0.9737146158290005, + "grad_norm": 2.1544832654961175, + "learning_rate": 5.432754476396573e-09, + "loss": 0.518, + "step": 23597 + }, + { + "epoch": 0.9737558801683585, + "grad_norm": 2.7786221816975356, + "learning_rate": 5.415720814837155e-09, + "loss": 0.5555, + "step": 23598 + }, + { + "epoch": 0.9737971445077165, + "grad_norm": 10.333884545512202, + "learning_rate": 5.398713850292003e-09, + "loss": 0.521, + "step": 23599 + }, + { + "epoch": 0.9738384088470744, + "grad_norm": 3.4242582936148516, + "learning_rate": 5.381733583064541e-09, + "loss": 0.5228, + "step": 23600 + }, + { + "epoch": 0.9738796731864323, + "grad_norm": 14.03813256244546, + "learning_rate": 5.364780013458525e-09, + "loss": 0.4837, + "step": 23601 + }, + { + "epoch": 0.9739209375257902, + "grad_norm": 3.2877243397626006, + "learning_rate": 5.347853141776382e-09, + "loss": 0.5535, + "step": 23602 + }, + { + "epoch": 0.9739622018651481, + "grad_norm": 2.605702663478972, + "learning_rate": 5.330952968320701e-09, + "loss": 0.4769, + "step": 23603 + }, + { + "epoch": 0.9740034662045061, + "grad_norm": 2.151428368741428, + "learning_rate": 5.3140794933932424e-09, + "loss": 0.5139, + "step": 23604 + }, + { + "epoch": 0.974044730543864, + "grad_norm": 2.2660125426571582, + "learning_rate": 5.297232717295431e-09, + "loss": 0.5631, + "step": 23605 + }, + { + "epoch": 0.9740859948832219, + "grad_norm": 4.7119716149603885, + "learning_rate": 5.280412640328358e-09, + "loss": 0.5377, + "step": 23606 + }, + { + "epoch": 0.9741272592225798, + "grad_norm": 3.010855028845345, + "learning_rate": 5.2636192627921185e-09, + "loss": 0.5219, + "step": 23607 + }, + { + "epoch": 0.9741685235619377, + "grad_norm": 2.2068318899291257, + "learning_rate": 5.246852584986972e-09, + "loss": 0.5272, + "step": 23608 + }, + { + "epoch": 0.9742097879012958, + "grad_norm": 2.9116765238497386, + "learning_rate": 5.230112607212512e-09, + "loss": 0.4859, + "step": 23609 + }, + { + "epoch": 0.9742510522406537, + "grad_norm": 4.658523043790955, + 
"learning_rate": 5.213399329767499e-09, + "loss": 0.5427, + "step": 23610 + }, + { + "epoch": 0.9742923165800116, + "grad_norm": 5.8544587989701, + "learning_rate": 5.196712752950528e-09, + "loss": 0.4966, + "step": 23611 + }, + { + "epoch": 0.9743335809193695, + "grad_norm": 7.0909190124435595, + "learning_rate": 5.1800528770595265e-09, + "loss": 0.4306, + "step": 23612 + }, + { + "epoch": 0.9743748452587274, + "grad_norm": 5.037773242634841, + "learning_rate": 5.163419702392425e-09, + "loss": 0.5069, + "step": 23613 + }, + { + "epoch": 0.9744161095980853, + "grad_norm": 2.2355255555997684, + "learning_rate": 5.14681322924615e-09, + "loss": 0.4994, + "step": 23614 + }, + { + "epoch": 0.9744573739374432, + "grad_norm": 2.5512716789739542, + "learning_rate": 5.130233457917299e-09, + "loss": 0.5203, + "step": 23615 + }, + { + "epoch": 0.9744986382768012, + "grad_norm": 2.639345955377715, + "learning_rate": 5.113680388702136e-09, + "loss": 0.5201, + "step": 23616 + }, + { + "epoch": 0.9745399026161591, + "grad_norm": 2.810863650555397, + "learning_rate": 5.097154021896255e-09, + "loss": 0.4389, + "step": 23617 + }, + { + "epoch": 0.974581166955517, + "grad_norm": 2.9698672438579825, + "learning_rate": 5.080654357794756e-09, + "loss": 0.5224, + "step": 23618 + }, + { + "epoch": 0.9746224312948749, + "grad_norm": 2.1145249692458283, + "learning_rate": 5.0641813966925686e-09, + "loss": 0.5044, + "step": 23619 + }, + { + "epoch": 0.9746636956342329, + "grad_norm": 3.298040853335136, + "learning_rate": 5.047735138883958e-09, + "loss": 0.5, + "step": 23620 + }, + { + "epoch": 0.9747049599735909, + "grad_norm": 5.739079201022788, + "learning_rate": 5.0313155846625215e-09, + "loss": 0.5784, + "step": 23621 + }, + { + "epoch": 0.9747462243129488, + "grad_norm": 4.570741555719894, + "learning_rate": 5.014922734321525e-09, + "loss": 0.5348, + "step": 23622 + }, + { + "epoch": 0.9747874886523067, + "grad_norm": 2.219217371810616, + "learning_rate": 4.998556588154069e-09, + "loss": 0.5018, + "step": 23623 + }, + { + "epoch": 0.9748287529916646, + "grad_norm": 3.9196732213950973, + "learning_rate": 4.982217146452251e-09, + "loss": 0.5029, + "step": 23624 + }, + { + "epoch": 0.9748700173310225, + "grad_norm": 3.566809789156573, + "learning_rate": 4.965904409507838e-09, + "loss": 0.5482, + "step": 23625 + }, + { + "epoch": 0.9749112816703804, + "grad_norm": 6.187750404662491, + "learning_rate": 4.949618377612597e-09, + "loss": 0.4919, + "step": 23626 + }, + { + "epoch": 0.9749525460097384, + "grad_norm": 5.932507571356644, + "learning_rate": 4.933359051057129e-09, + "loss": 0.5036, + "step": 23627 + }, + { + "epoch": 0.9749938103490963, + "grad_norm": 4.528639395319105, + "learning_rate": 4.917126430131869e-09, + "loss": 0.4878, + "step": 23628 + }, + { + "epoch": 0.9750350746884542, + "grad_norm": 2.5684535664038624, + "learning_rate": 4.900920515126916e-09, + "loss": 0.4777, + "step": 23629 + }, + { + "epoch": 0.9750763390278122, + "grad_norm": 2.965896209105478, + "learning_rate": 4.8847413063317084e-09, + "loss": 0.4913, + "step": 23630 + }, + { + "epoch": 0.9751176033671701, + "grad_norm": 2.404562122948404, + "learning_rate": 4.868588804035179e-09, + "loss": 0.4535, + "step": 23631 + }, + { + "epoch": 0.975158867706528, + "grad_norm": 3.3832530354496555, + "learning_rate": 4.8524630085259315e-09, + "loss": 0.5137, + "step": 23632 + }, + { + "epoch": 0.975200132045886, + "grad_norm": 12.729568618101501, + "learning_rate": 4.836363920092069e-09, + "loss": 0.528, + "step": 23633 + }, + { + "epoch": 
0.9752413963852439, + "grad_norm": 3.2911655365609107, + "learning_rate": 4.820291539021027e-09, + "loss": 0.5339, + "step": 23634 + }, + { + "epoch": 0.9752826607246018, + "grad_norm": 4.77883249136482, + "learning_rate": 4.80424586559991e-09, + "loss": 0.5247, + "step": 23635 + }, + { + "epoch": 0.9753239250639597, + "grad_norm": 2.862310140856176, + "learning_rate": 4.788226900115322e-09, + "loss": 0.537, + "step": 23636 + }, + { + "epoch": 0.9753651894033176, + "grad_norm": 2.8141756615783624, + "learning_rate": 4.772234642853534e-09, + "loss": 0.5978, + "step": 23637 + }, + { + "epoch": 0.9754064537426755, + "grad_norm": 3.101023030985002, + "learning_rate": 4.756269094100152e-09, + "loss": 0.5301, + "step": 23638 + }, + { + "epoch": 0.9754477180820335, + "grad_norm": 4.1477312542956835, + "learning_rate": 4.7403302541401126e-09, + "loss": 0.5193, + "step": 23639 + }, + { + "epoch": 0.9754889824213915, + "grad_norm": 3.8938336109831937, + "learning_rate": 4.724418123258523e-09, + "loss": 0.4829, + "step": 23640 + }, + { + "epoch": 0.9755302467607494, + "grad_norm": 2.9655950231855104, + "learning_rate": 4.708532701739487e-09, + "loss": 0.5679, + "step": 23641 + }, + { + "epoch": 0.9755715111001073, + "grad_norm": 2.955227445011403, + "learning_rate": 4.692673989866614e-09, + "loss": 0.4866, + "step": 23642 + }, + { + "epoch": 0.9756127754394652, + "grad_norm": 4.517932569832529, + "learning_rate": 4.676841987923175e-09, + "loss": 0.5569, + "step": 23643 + }, + { + "epoch": 0.9756540397788231, + "grad_norm": 2.49132932058849, + "learning_rate": 4.661036696192112e-09, + "loss": 0.5371, + "step": 23644 + }, + { + "epoch": 0.9756953041181811, + "grad_norm": 2.4214300591079105, + "learning_rate": 4.645258114955864e-09, + "loss": 0.4953, + "step": 23645 + }, + { + "epoch": 0.975736568457539, + "grad_norm": 5.31618645477696, + "learning_rate": 4.62950624449604e-09, + "loss": 0.6038, + "step": 23646 + }, + { + "epoch": 0.9757778327968969, + "grad_norm": 3.9644955136872673, + "learning_rate": 4.6137810850939155e-09, + "loss": 0.5329, + "step": 23647 + }, + { + "epoch": 0.9758190971362548, + "grad_norm": 2.937420574577915, + "learning_rate": 4.598082637030765e-09, + "loss": 0.4952, + "step": 23648 + }, + { + "epoch": 0.9758603614756127, + "grad_norm": 3.005995002044026, + "learning_rate": 4.582410900586697e-09, + "loss": 0.4901, + "step": 23649 + }, + { + "epoch": 0.9759016258149708, + "grad_norm": 3.3365356241961144, + "learning_rate": 4.566765876041656e-09, + "loss": 0.4913, + "step": 23650 + }, + { + "epoch": 0.9759428901543287, + "grad_norm": 3.407968410217073, + "learning_rate": 4.5511475636752506e-09, + "loss": 0.4478, + "step": 23651 + }, + { + "epoch": 0.9759841544936866, + "grad_norm": 3.4714525058442525, + "learning_rate": 4.5355559637664244e-09, + "loss": 0.4637, + "step": 23652 + }, + { + "epoch": 0.9760254188330445, + "grad_norm": 2.6258562876827303, + "learning_rate": 4.519991076593455e-09, + "loss": 0.4542, + "step": 23653 + }, + { + "epoch": 0.9760666831724024, + "grad_norm": 3.744632880841017, + "learning_rate": 4.504452902434786e-09, + "loss": 0.4835, + "step": 23654 + }, + { + "epoch": 0.9761079475117603, + "grad_norm": 3.1520626462211103, + "learning_rate": 4.488941441567695e-09, + "loss": 0.5386, + "step": 23655 + }, + { + "epoch": 0.9761492118511182, + "grad_norm": 3.041393012723898, + "learning_rate": 4.473456694269296e-09, + "loss": 0.508, + "step": 23656 + }, + { + "epoch": 0.9761904761904762, + "grad_norm": 3.273213964262775, + "learning_rate": 4.457998660816031e-09, + 
"loss": 0.5064, + "step": 23657 + }, + { + "epoch": 0.9762317405298341, + "grad_norm": 3.2784392293247935, + "learning_rate": 4.442567341484349e-09, + "loss": 0.538, + "step": 23658 + }, + { + "epoch": 0.976273004869192, + "grad_norm": 3.4244297928904857, + "learning_rate": 4.4271627365496945e-09, + "loss": 0.5327, + "step": 23659 + }, + { + "epoch": 0.97631426920855, + "grad_norm": 4.866439851995551, + "learning_rate": 4.411784846287348e-09, + "loss": 0.4869, + "step": 23660 + }, + { + "epoch": 0.9763555335479079, + "grad_norm": 3.101143089266229, + "learning_rate": 4.396433670971756e-09, + "loss": 0.4867, + "step": 23661 + }, + { + "epoch": 0.9763967978872659, + "grad_norm": 3.962013139167512, + "learning_rate": 4.381109210877365e-09, + "loss": 0.4778, + "step": 23662 + }, + { + "epoch": 0.9764380622266238, + "grad_norm": 2.5598548894518847, + "learning_rate": 4.365811466277958e-09, + "loss": 0.4919, + "step": 23663 + }, + { + "epoch": 0.9764793265659817, + "grad_norm": 4.46422378392791, + "learning_rate": 4.3505404374464815e-09, + "loss": 0.5006, + "step": 23664 + }, + { + "epoch": 0.9765205909053396, + "grad_norm": 2.3067574582368535, + "learning_rate": 4.3352961246558855e-09, + "loss": 0.5291, + "step": 23665 + }, + { + "epoch": 0.9765618552446975, + "grad_norm": 2.8269837848220463, + "learning_rate": 4.320078528178617e-09, + "loss": 0.5326, + "step": 23666 + }, + { + "epoch": 0.9766031195840554, + "grad_norm": 5.585465872026568, + "learning_rate": 4.304887648286293e-09, + "loss": 0.5043, + "step": 23667 + }, + { + "epoch": 0.9766443839234134, + "grad_norm": 11.333293708432548, + "learning_rate": 4.289723485250363e-09, + "loss": 0.5334, + "step": 23668 + }, + { + "epoch": 0.9766856482627713, + "grad_norm": 3.678894579505752, + "learning_rate": 4.2745860393416105e-09, + "loss": 0.5258, + "step": 23669 + }, + { + "epoch": 0.9767269126021293, + "grad_norm": 6.876053579195421, + "learning_rate": 4.2594753108304855e-09, + "loss": 0.4994, + "step": 23670 + }, + { + "epoch": 0.9767681769414872, + "grad_norm": 4.649648799159721, + "learning_rate": 4.2443912999869385e-09, + "loss": 0.4807, + "step": 23671 + }, + { + "epoch": 0.9768094412808451, + "grad_norm": 3.173352787860099, + "learning_rate": 4.229334007080254e-09, + "loss": 0.4356, + "step": 23672 + }, + { + "epoch": 0.976850705620203, + "grad_norm": 2.4322260725335165, + "learning_rate": 4.214303432379551e-09, + "loss": 0.5519, + "step": 23673 + }, + { + "epoch": 0.976891969959561, + "grad_norm": 2.622651905859224, + "learning_rate": 4.199299576153448e-09, + "loss": 0.4888, + "step": 23674 + }, + { + "epoch": 0.9769332342989189, + "grad_norm": 4.450419099011558, + "learning_rate": 4.1843224386695635e-09, + "loss": 0.5263, + "step": 23675 + }, + { + "epoch": 0.9769744986382768, + "grad_norm": 2.9726492695651276, + "learning_rate": 4.16937202019585e-09, + "loss": 0.5386, + "step": 23676 + }, + { + "epoch": 0.9770157629776347, + "grad_norm": 3.8758474448542466, + "learning_rate": 4.154448320998927e-09, + "loss": 0.4934, + "step": 23677 + }, + { + "epoch": 0.9770570273169926, + "grad_norm": 6.350273400964829, + "learning_rate": 4.139551341345749e-09, + "loss": 0.5653, + "step": 23678 + }, + { + "epoch": 0.9770982916563505, + "grad_norm": 1.8303159816438106, + "learning_rate": 4.1246810815022685e-09, + "loss": 0.5306, + "step": 23679 + }, + { + "epoch": 0.9771395559957086, + "grad_norm": 5.038325499339323, + "learning_rate": 4.109837541733941e-09, + "loss": 0.4969, + "step": 23680 + }, + { + "epoch": 0.9771808203350665, + "grad_norm": 
4.04618630517259, + "learning_rate": 4.095020722306053e-09, + "loss": 0.5256, + "step": 23681 + }, + { + "epoch": 0.9772220846744244, + "grad_norm": 2.8564160144513426, + "learning_rate": 4.080230623483394e-09, + "loss": 0.4839, + "step": 23682 + }, + { + "epoch": 0.9772633490137823, + "grad_norm": 4.235681330898719, + "learning_rate": 4.065467245530086e-09, + "loss": 0.5145, + "step": 23683 + }, + { + "epoch": 0.9773046133531402, + "grad_norm": 2.8010388173585428, + "learning_rate": 4.050730588709584e-09, + "loss": 0.4577, + "step": 23684 + }, + { + "epoch": 0.9773458776924981, + "grad_norm": 3.4860677632252486, + "learning_rate": 4.036020653285511e-09, + "loss": 0.5171, + "step": 23685 + }, + { + "epoch": 0.9773871420318561, + "grad_norm": 3.3913526902637665, + "learning_rate": 4.021337439520323e-09, + "loss": 0.4925, + "step": 23686 + }, + { + "epoch": 0.977428406371214, + "grad_norm": 15.736900771978545, + "learning_rate": 4.006680947676478e-09, + "loss": 0.5355, + "step": 23687 + }, + { + "epoch": 0.9774696707105719, + "grad_norm": 2.946576693087432, + "learning_rate": 3.992051178015599e-09, + "loss": 0.4974, + "step": 23688 + }, + { + "epoch": 0.9775109350499298, + "grad_norm": 3.364930474529943, + "learning_rate": 3.97744813079931e-09, + "loss": 0.4893, + "step": 23689 + }, + { + "epoch": 0.9775521993892877, + "grad_norm": 4.037764962593884, + "learning_rate": 3.962871806288237e-09, + "loss": 0.4922, + "step": 23690 + }, + { + "epoch": 0.9775934637286458, + "grad_norm": 3.443851182764913, + "learning_rate": 3.948322204742672e-09, + "loss": 0.4617, + "step": 23691 + }, + { + "epoch": 0.9776347280680037, + "grad_norm": 1.9381849113204939, + "learning_rate": 3.933799326422738e-09, + "loss": 0.4988, + "step": 23692 + }, + { + "epoch": 0.9776759924073616, + "grad_norm": 8.682383795343888, + "learning_rate": 3.919303171587563e-09, + "loss": 0.5319, + "step": 23693 + }, + { + "epoch": 0.9777172567467195, + "grad_norm": 3.517176706536949, + "learning_rate": 3.904833740496438e-09, + "loss": 0.4591, + "step": 23694 + }, + { + "epoch": 0.9777585210860774, + "grad_norm": 2.363681967170278, + "learning_rate": 3.8903910334074924e-09, + "loss": 0.517, + "step": 23695 + }, + { + "epoch": 0.9777997854254353, + "grad_norm": 3.73062708037765, + "learning_rate": 3.875975050579017e-09, + "loss": 0.4925, + "step": 23696 + }, + { + "epoch": 0.9778410497647932, + "grad_norm": 2.464837934530821, + "learning_rate": 3.8615857922681406e-09, + "loss": 0.4759, + "step": 23697 + }, + { + "epoch": 0.9778823141041512, + "grad_norm": 12.76483957709655, + "learning_rate": 3.8472232587323245e-09, + "loss": 0.5439, + "step": 23698 + }, + { + "epoch": 0.9779235784435091, + "grad_norm": 5.9701655728351515, + "learning_rate": 3.832887450227862e-09, + "loss": 0.5251, + "step": 23699 + }, + { + "epoch": 0.977964842782867, + "grad_norm": 6.4596019564094505, + "learning_rate": 3.818578367010717e-09, + "loss": 0.5083, + "step": 23700 + }, + { + "epoch": 0.978006107122225, + "grad_norm": 2.935505175287371, + "learning_rate": 3.804296009336683e-09, + "loss": 0.5343, + "step": 23701 + }, + { + "epoch": 0.9780473714615829, + "grad_norm": 3.871863855399371, + "learning_rate": 3.790040377460891e-09, + "loss": 0.5095, + "step": 23702 + }, + { + "epoch": 0.9780886358009409, + "grad_norm": 2.5224192876561364, + "learning_rate": 3.775811471637969e-09, + "loss": 0.5332, + "step": 23703 + }, + { + "epoch": 0.9781299001402988, + "grad_norm": 3.222919390399255, + "learning_rate": 3.761609292122048e-09, + "loss": 0.5087, + "step": 23704 + }, 
+ { + "epoch": 0.9781711644796567, + "grad_norm": 2.5988370186237515, + "learning_rate": 3.747433839166592e-09, + "loss": 0.477, + "step": 23705 + }, + { + "epoch": 0.9782124288190146, + "grad_norm": 4.3483154378483055, + "learning_rate": 3.733285113025231e-09, + "loss": 0.4924, + "step": 23706 + }, + { + "epoch": 0.9782536931583725, + "grad_norm": 2.1795888448210667, + "learning_rate": 3.71916311395043e-09, + "loss": 0.4944, + "step": 23707 + }, + { + "epoch": 0.9782949574977304, + "grad_norm": 4.35728129530059, + "learning_rate": 3.7050678421944872e-09, + "loss": 0.4977, + "step": 23708 + }, + { + "epoch": 0.9783362218370883, + "grad_norm": 3.0300648208541268, + "learning_rate": 3.6909992980092012e-09, + "loss": 0.51, + "step": 23709 + }, + { + "epoch": 0.9783774861764463, + "grad_norm": 3.375260832615765, + "learning_rate": 3.676957481645704e-09, + "loss": 0.516, + "step": 23710 + }, + { + "epoch": 0.9784187505158043, + "grad_norm": 4.103786748143675, + "learning_rate": 3.662942393355129e-09, + "loss": 0.5171, + "step": 23711 + }, + { + "epoch": 0.9784600148551622, + "grad_norm": 4.4462859202296805, + "learning_rate": 3.648954033387608e-09, + "loss": 0.5322, + "step": 23712 + }, + { + "epoch": 0.9785012791945201, + "grad_norm": 3.1167282264231546, + "learning_rate": 3.634992401993109e-09, + "loss": 0.4893, + "step": 23713 + }, + { + "epoch": 0.978542543533878, + "grad_norm": 25.952046844602915, + "learning_rate": 3.621057499420932e-09, + "loss": 0.5223, + "step": 23714 + }, + { + "epoch": 0.978583807873236, + "grad_norm": 3.286491323359372, + "learning_rate": 3.6071493259202116e-09, + "loss": 0.5487, + "step": 23715 + }, + { + "epoch": 0.9786250722125939, + "grad_norm": 8.76351990154513, + "learning_rate": 3.593267881739082e-09, + "loss": 0.4822, + "step": 23716 + }, + { + "epoch": 0.9786663365519518, + "grad_norm": 14.368620415517963, + "learning_rate": 3.5794131671255114e-09, + "loss": 0.5087, + "step": 23717 + }, + { + "epoch": 0.9787076008913097, + "grad_norm": 4.710407045391206, + "learning_rate": 3.5655851823271357e-09, + "loss": 0.5243, + "step": 23718 + }, + { + "epoch": 0.9787488652306676, + "grad_norm": 3.750180946728891, + "learning_rate": 3.5517839275910902e-09, + "loss": 0.518, + "step": 23719 + }, + { + "epoch": 0.9787901295700255, + "grad_norm": 3.9718215712796723, + "learning_rate": 3.5380094031635114e-09, + "loss": 0.4403, + "step": 23720 + }, + { + "epoch": 0.9788313939093836, + "grad_norm": 3.7894110349822965, + "learning_rate": 3.524261609290702e-09, + "loss": 0.5348, + "step": 23721 + }, + { + "epoch": 0.9788726582487415, + "grad_norm": 2.822696002195565, + "learning_rate": 3.5105405462182993e-09, + "loss": 0.4815, + "step": 23722 + }, + { + "epoch": 0.9789139225880994, + "grad_norm": 6.609715567125814, + "learning_rate": 3.4968462141911073e-09, + "loss": 0.4991, + "step": 23723 + }, + { + "epoch": 0.9789551869274573, + "grad_norm": 2.9688629222028187, + "learning_rate": 3.4831786134540967e-09, + "loss": 0.5516, + "step": 23724 + }, + { + "epoch": 0.9789964512668152, + "grad_norm": 2.975466122790508, + "learning_rate": 3.4695377442512388e-09, + "loss": 0.4974, + "step": 23725 + }, + { + "epoch": 0.9790377156061731, + "grad_norm": 2.0593598957064447, + "learning_rate": 3.4559236068260058e-09, + "loss": 0.5074, + "step": 23726 + }, + { + "epoch": 0.9790789799455311, + "grad_norm": 96.59169585529816, + "learning_rate": 3.4423362014218697e-09, + "loss": 0.5206, + "step": 23727 + }, + { + "epoch": 0.979120244284889, + "grad_norm": 3.3618091462722597, + "learning_rate": 
3.4287755282814694e-09, + "loss": 0.5355, + "step": 23728 + }, + { + "epoch": 0.9791615086242469, + "grad_norm": 7.427976324844408, + "learning_rate": 3.4152415876469447e-09, + "loss": 0.5909, + "step": 23729 + }, + { + "epoch": 0.9792027729636048, + "grad_norm": 3.153604932891732, + "learning_rate": 3.401734379760102e-09, + "loss": 0.4643, + "step": 23730 + }, + { + "epoch": 0.9792440373029628, + "grad_norm": 2.9474867054383735, + "learning_rate": 3.3882539048620818e-09, + "loss": 0.5385, + "step": 23731 + }, + { + "epoch": 0.9792853016423207, + "grad_norm": 4.7226164712771075, + "learning_rate": 3.3748001631938583e-09, + "loss": 0.4541, + "step": 23732 + }, + { + "epoch": 0.9793265659816787, + "grad_norm": 3.5257869083628535, + "learning_rate": 3.3613731549957392e-09, + "loss": 0.5171, + "step": 23733 + }, + { + "epoch": 0.9793678303210366, + "grad_norm": 4.197652303966216, + "learning_rate": 3.3479728805075328e-09, + "loss": 0.5099, + "step": 23734 + }, + { + "epoch": 0.9794090946603945, + "grad_norm": 2.5062315249245932, + "learning_rate": 3.334599339968547e-09, + "loss": 0.4904, + "step": 23735 + }, + { + "epoch": 0.9794503589997524, + "grad_norm": 4.375882358115947, + "learning_rate": 3.3212525336175915e-09, + "loss": 0.4726, + "step": 23736 + }, + { + "epoch": 0.9794916233391103, + "grad_norm": 6.55836103818947, + "learning_rate": 3.307932461693308e-09, + "loss": 0.4913, + "step": 23737 + }, + { + "epoch": 0.9795328876784682, + "grad_norm": 3.9718676593514908, + "learning_rate": 3.2946391244335073e-09, + "loss": 0.5222, + "step": 23738 + }, + { + "epoch": 0.9795741520178262, + "grad_norm": 4.677092141632449, + "learning_rate": 3.2813725220754986e-09, + "loss": 0.5032, + "step": 23739 + }, + { + "epoch": 0.9796154163571841, + "grad_norm": 2.997832301387306, + "learning_rate": 3.2681326548565924e-09, + "loss": 0.5274, + "step": 23740 + }, + { + "epoch": 0.9796566806965421, + "grad_norm": 2.179369009157219, + "learning_rate": 3.2549195230129335e-09, + "loss": 0.5376, + "step": 23741 + }, + { + "epoch": 0.9796979450359, + "grad_norm": 4.410833877017448, + "learning_rate": 3.2417331267806662e-09, + "loss": 0.4899, + "step": 23742 + }, + { + "epoch": 0.9797392093752579, + "grad_norm": 3.930895357681961, + "learning_rate": 3.228573466395435e-09, + "loss": 0.4893, + "step": 23743 + }, + { + "epoch": 0.9797804737146159, + "grad_norm": 5.62177698232416, + "learning_rate": 3.2154405420922183e-09, + "loss": 0.5046, + "step": 23744 + }, + { + "epoch": 0.9798217380539738, + "grad_norm": 3.127997679801781, + "learning_rate": 3.2023343541056628e-09, + "loss": 0.4832, + "step": 23745 + }, + { + "epoch": 0.9798630023933317, + "grad_norm": 2.5832486127918126, + "learning_rate": 3.1892549026699134e-09, + "loss": 0.5231, + "step": 23746 + }, + { + "epoch": 0.9799042667326896, + "grad_norm": 2.732388350672869, + "learning_rate": 3.1762021880182844e-09, + "loss": 0.436, + "step": 23747 + }, + { + "epoch": 0.9799455310720475, + "grad_norm": 4.092845287078239, + "learning_rate": 3.163176210384422e-09, + "loss": 0.5, + "step": 23748 + }, + { + "epoch": 0.9799867954114054, + "grad_norm": 4.6635723786128365, + "learning_rate": 3.1501769700004735e-09, + "loss": 0.4917, + "step": 23749 + }, + { + "epoch": 0.9800280597507633, + "grad_norm": 6.093778088684002, + "learning_rate": 3.1372044670990864e-09, + "loss": 0.5394, + "step": 23750 + }, + { + "epoch": 0.9800693240901213, + "grad_norm": 3.425005153657461, + "learning_rate": 3.124258701911742e-09, + "loss": 0.4794, + "step": 23751 + }, + { + "epoch": 
0.9801105884294793, + "grad_norm": 3.2878808308300624, + "learning_rate": 3.111339674669755e-09, + "loss": 0.5242, + "step": 23752 + }, + { + "epoch": 0.9801518527688372, + "grad_norm": 4.078385840883562, + "learning_rate": 3.098447385603942e-09, + "loss": 0.4757, + "step": 23753 + }, + { + "epoch": 0.9801931171081951, + "grad_norm": 2.4376645173250484, + "learning_rate": 3.0855818349444507e-09, + "loss": 0.5116, + "step": 23754 + }, + { + "epoch": 0.980234381447553, + "grad_norm": 3.792693562803618, + "learning_rate": 3.072743022921265e-09, + "loss": 0.5137, + "step": 23755 + }, + { + "epoch": 0.980275645786911, + "grad_norm": 17.451223805400716, + "learning_rate": 3.059930949763534e-09, + "loss": 0.5276, + "step": 23756 + }, + { + "epoch": 0.9803169101262689, + "grad_norm": 5.340372215199299, + "learning_rate": 3.047145615700242e-09, + "loss": 0.5209, + "step": 23757 + }, + { + "epoch": 0.9803581744656268, + "grad_norm": 6.361278535378362, + "learning_rate": 3.034387020959706e-09, + "loss": 0.521, + "step": 23758 + }, + { + "epoch": 0.9803994388049847, + "grad_norm": 2.6603935177885947, + "learning_rate": 3.02165516576991e-09, + "loss": 0.5272, + "step": 23759 + }, + { + "epoch": 0.9804407031443426, + "grad_norm": 7.309503949952472, + "learning_rate": 3.0089500503581726e-09, + "loss": 0.5456, + "step": 23760 + }, + { + "epoch": 0.9804819674837005, + "grad_norm": 2.4809313858816266, + "learning_rate": 2.9962716749514786e-09, + "loss": 0.4881, + "step": 23761 + }, + { + "epoch": 0.9805232318230586, + "grad_norm": 14.178784172907003, + "learning_rate": 2.9836200397763137e-09, + "loss": 0.4954, + "step": 23762 + }, + { + "epoch": 0.9805644961624165, + "grad_norm": 3.591252993734531, + "learning_rate": 2.9709951450586636e-09, + "loss": 0.5161, + "step": 23763 + }, + { + "epoch": 0.9806057605017744, + "grad_norm": 3.650501513944984, + "learning_rate": 2.958396991024015e-09, + "loss": 0.5146, + "step": 23764 + }, + { + "epoch": 0.9806470248411323, + "grad_norm": 2.9011817095965324, + "learning_rate": 2.945825577897521e-09, + "loss": 0.5601, + "step": 23765 + }, + { + "epoch": 0.9806882891804902, + "grad_norm": 2.215018184603506, + "learning_rate": 2.9332809059035015e-09, + "loss": 0.4775, + "step": 23766 + }, + { + "epoch": 0.9807295535198481, + "grad_norm": 3.8504768078178144, + "learning_rate": 2.9207629752661113e-09, + "loss": 0.5344, + "step": 23767 + }, + { + "epoch": 0.9807708178592061, + "grad_norm": 3.5888219484106423, + "learning_rate": 2.9082717862091713e-09, + "loss": 0.5168, + "step": 23768 + }, + { + "epoch": 0.980812082198564, + "grad_norm": 2.9505090074803753, + "learning_rate": 2.8958073389556695e-09, + "loss": 0.5246, + "step": 23769 + }, + { + "epoch": 0.9808533465379219, + "grad_norm": 9.937754533446764, + "learning_rate": 2.8833696337280947e-09, + "loss": 0.5602, + "step": 23770 + }, + { + "epoch": 0.9808946108772798, + "grad_norm": 13.06513716031212, + "learning_rate": 2.8709586707489354e-09, + "loss": 0.5151, + "step": 23771 + }, + { + "epoch": 0.9809358752166378, + "grad_norm": 2.3768909319942746, + "learning_rate": 2.858574450239515e-09, + "loss": 0.528, + "step": 23772 + }, + { + "epoch": 0.9809771395559957, + "grad_norm": 44.12734522935104, + "learning_rate": 2.8462169724213227e-09, + "loss": 0.4963, + "step": 23773 + }, + { + "epoch": 0.9810184038953537, + "grad_norm": 2.9588328358168234, + "learning_rate": 2.8338862375150153e-09, + "loss": 0.4801, + "step": 23774 + }, + { + "epoch": 0.9810596682347116, + "grad_norm": 2.6043941146894762, + "learning_rate": 
2.8215822457407502e-09, + "loss": 0.5135, + "step": 23775 + }, + { + "epoch": 0.9811009325740695, + "grad_norm": 2.983427558926789, + "learning_rate": 2.8093049973183517e-09, + "loss": 0.5097, + "step": 23776 + }, + { + "epoch": 0.9811421969134274, + "grad_norm": 2.788945317202328, + "learning_rate": 2.79705449246731e-09, + "loss": 0.5596, + "step": 23777 + }, + { + "epoch": 0.9811834612527853, + "grad_norm": 5.837911204558522, + "learning_rate": 2.7848307314062847e-09, + "loss": 0.571, + "step": 23778 + }, + { + "epoch": 0.9812247255921432, + "grad_norm": 2.278024569898153, + "learning_rate": 2.772633714353601e-09, + "loss": 0.483, + "step": 23779 + }, + { + "epoch": 0.9812659899315012, + "grad_norm": 2.7410989911943147, + "learning_rate": 2.7604634415270836e-09, + "loss": 0.5589, + "step": 23780 + }, + { + "epoch": 0.9813072542708591, + "grad_norm": 3.095066856727141, + "learning_rate": 2.7483199131443928e-09, + "loss": 0.4533, + "step": 23781 + }, + { + "epoch": 0.9813485186102171, + "grad_norm": 4.431889960276853, + "learning_rate": 2.7362031294221877e-09, + "loss": 0.4939, + "step": 23782 + }, + { + "epoch": 0.981389782949575, + "grad_norm": 3.323015682546031, + "learning_rate": 2.7241130905767963e-09, + "loss": 0.5103, + "step": 23783 + }, + { + "epoch": 0.9814310472889329, + "grad_norm": 3.262058854132978, + "learning_rate": 2.712049796824545e-09, + "loss": 0.5251, + "step": 23784 + }, + { + "epoch": 0.9814723116282908, + "grad_norm": 19.41441680507894, + "learning_rate": 2.7000132483807616e-09, + "loss": 0.5042, + "step": 23785 + }, + { + "epoch": 0.9815135759676488, + "grad_norm": 3.012270505994822, + "learning_rate": 2.688003445460274e-09, + "loss": 0.4996, + "step": 23786 + }, + { + "epoch": 0.9815548403070067, + "grad_norm": 2.743472724649868, + "learning_rate": 2.6760203882777445e-09, + "loss": 0.525, + "step": 23787 + }, + { + "epoch": 0.9815961046463646, + "grad_norm": 3.9530868126042993, + "learning_rate": 2.6640640770473345e-09, + "loss": 0.5787, + "step": 23788 + }, + { + "epoch": 0.9816373689857225, + "grad_norm": 8.520900015640482, + "learning_rate": 2.6521345119825402e-09, + "loss": 0.5287, + "step": 23789 + }, + { + "epoch": 0.9816786333250804, + "grad_norm": 7.843833560968592, + "learning_rate": 2.640231693296191e-09, + "loss": 0.5512, + "step": 23790 + }, + { + "epoch": 0.9817198976644383, + "grad_norm": 13.917505911035184, + "learning_rate": 2.628355621201284e-09, + "loss": 0.5333, + "step": 23791 + }, + { + "epoch": 0.9817611620037964, + "grad_norm": 2.317892208431352, + "learning_rate": 2.616506295909815e-09, + "loss": 0.5081, + "step": 23792 + }, + { + "epoch": 0.9818024263431543, + "grad_norm": 8.8095211354884, + "learning_rate": 2.6046837176332826e-09, + "loss": 0.4822, + "step": 23793 + }, + { + "epoch": 0.9818436906825122, + "grad_norm": 3.7540163781846094, + "learning_rate": 2.592887886583184e-09, + "loss": 0.4622, + "step": 23794 + }, + { + "epoch": 0.9818849550218701, + "grad_norm": 18.77896042879983, + "learning_rate": 2.581118802970017e-09, + "loss": 0.5074, + "step": 23795 + }, + { + "epoch": 0.981926219361228, + "grad_norm": 2.2190552110894957, + "learning_rate": 2.5693764670039475e-09, + "loss": 0.4576, + "step": 23796 + }, + { + "epoch": 0.981967483700586, + "grad_norm": 8.197870301537511, + "learning_rate": 2.5576608788948075e-09, + "loss": 0.5632, + "step": 23797 + }, + { + "epoch": 0.9820087480399439, + "grad_norm": 2.9856945944434155, + "learning_rate": 2.545972038851929e-09, + "loss": 0.5415, + "step": 23798 + }, + { + "epoch": 
0.9820500123793018, + "grad_norm": 3.5635462358129173, + "learning_rate": 2.534309947083979e-09, + "loss": 0.5848, + "step": 23799 + }, + { + "epoch": 0.9820912767186597, + "grad_norm": 2.0465353954105034, + "learning_rate": 2.5226746037994574e-09, + "loss": 0.4954, + "step": 23800 + }, + { + "epoch": 0.9821325410580176, + "grad_norm": 2.150936794314473, + "learning_rate": 2.511066009206031e-09, + "loss": 0.4984, + "step": 23801 + }, + { + "epoch": 0.9821738053973756, + "grad_norm": 3.7125695620744756, + "learning_rate": 2.4994841635110343e-09, + "loss": 0.5273, + "step": 23802 + }, + { + "epoch": 0.9822150697367336, + "grad_norm": 1.9955073079199133, + "learning_rate": 2.487929066921468e-09, + "loss": 0.4636, + "step": 23803 + }, + { + "epoch": 0.9822563340760915, + "grad_norm": 2.2013911340852816, + "learning_rate": 2.4764007196436677e-09, + "loss": 0.4284, + "step": 23804 + }, + { + "epoch": 0.9822975984154494, + "grad_norm": 2.7667719246909015, + "learning_rate": 2.464899121883635e-09, + "loss": 0.5369, + "step": 23805 + }, + { + "epoch": 0.9823388627548073, + "grad_norm": 3.443959496409208, + "learning_rate": 2.4534242738467048e-09, + "loss": 0.5121, + "step": 23806 + }, + { + "epoch": 0.9823801270941652, + "grad_norm": 1.7548150079837908, + "learning_rate": 2.4419761757380477e-09, + "loss": 0.4635, + "step": 23807 + }, + { + "epoch": 0.9824213914335231, + "grad_norm": 3.0819205049959764, + "learning_rate": 2.4305548277618327e-09, + "loss": 0.4815, + "step": 23808 + }, + { + "epoch": 0.982462655772881, + "grad_norm": 3.9233991549362637, + "learning_rate": 2.419160230122397e-09, + "loss": 0.5465, + "step": 23809 + }, + { + "epoch": 0.982503920112239, + "grad_norm": 17.175480871912992, + "learning_rate": 2.4077923830230774e-09, + "loss": 0.5144, + "step": 23810 + }, + { + "epoch": 0.9825451844515969, + "grad_norm": 2.280185521051835, + "learning_rate": 2.3964512866668787e-09, + "loss": 0.428, + "step": 23811 + }, + { + "epoch": 0.9825864487909548, + "grad_norm": 4.947005371444134, + "learning_rate": 2.3851369412566382e-09, + "loss": 0.4755, + "step": 23812 + }, + { + "epoch": 0.9826277131303128, + "grad_norm": 3.9091124727191353, + "learning_rate": 2.3738493469941946e-09, + "loss": 0.5624, + "step": 23813 + }, + { + "epoch": 0.9826689774696707, + "grad_norm": 2.5695658982818395, + "learning_rate": 2.3625885040813866e-09, + "loss": 0.489, + "step": 23814 + }, + { + "epoch": 0.9827102418090287, + "grad_norm": 2.594069844275843, + "learning_rate": 2.35135441271922e-09, + "loss": 0.4611, + "step": 23815 + }, + { + "epoch": 0.9827515061483866, + "grad_norm": 3.1881139925220983, + "learning_rate": 2.340147073108534e-09, + "loss": 0.5769, + "step": 23816 + }, + { + "epoch": 0.9827927704877445, + "grad_norm": 2.0740012659032034, + "learning_rate": 2.3289664854491686e-09, + "loss": 0.538, + "step": 23817 + }, + { + "epoch": 0.9828340348271024, + "grad_norm": 4.393864527498544, + "learning_rate": 2.317812649941131e-09, + "loss": 0.5251, + "step": 23818 + }, + { + "epoch": 0.9828752991664603, + "grad_norm": 2.2394337389630565, + "learning_rate": 2.3066855667837617e-09, + "loss": 0.5193, + "step": 23819 + }, + { + "epoch": 0.9829165635058182, + "grad_norm": 2.114949829045051, + "learning_rate": 2.2955852361754016e-09, + "loss": 0.5473, + "step": 23820 + }, + { + "epoch": 0.9829578278451762, + "grad_norm": 2.9186102740935045, + "learning_rate": 2.2845116583147253e-09, + "loss": 0.5088, + "step": 23821 + }, + { + "epoch": 0.9829990921845341, + "grad_norm": 2.4165649600913093, + "learning_rate": 
2.2734648333992416e-09, + "loss": 0.4862, + "step": 23822 + }, + { + "epoch": 0.9830403565238921, + "grad_norm": 2.264161682661953, + "learning_rate": 2.2624447616266254e-09, + "loss": 0.5472, + "step": 23823 + }, + { + "epoch": 0.98308162086325, + "grad_norm": 3.9259182304043194, + "learning_rate": 2.2514514431933865e-09, + "loss": 0.4681, + "step": 23824 + }, + { + "epoch": 0.9831228852026079, + "grad_norm": 13.441851817816373, + "learning_rate": 2.240484878296034e-09, + "loss": 0.4654, + "step": 23825 + }, + { + "epoch": 0.9831641495419658, + "grad_norm": 10.753237760054153, + "learning_rate": 2.2295450671304118e-09, + "loss": 0.4922, + "step": 23826 + }, + { + "epoch": 0.9832054138813238, + "grad_norm": 2.4416129432353184, + "learning_rate": 2.2186320098918634e-09, + "loss": 0.5091, + "step": 23827 + }, + { + "epoch": 0.9832466782206817, + "grad_norm": 3.001165939502447, + "learning_rate": 2.2077457067755657e-09, + "loss": 0.5068, + "step": 23828 + }, + { + "epoch": 0.9832879425600396, + "grad_norm": 3.2883898055539578, + "learning_rate": 2.1968861579758635e-09, + "loss": 0.5191, + "step": 23829 + }, + { + "epoch": 0.9833292068993975, + "grad_norm": 12.72262191915251, + "learning_rate": 2.186053363686602e-09, + "loss": 0.5599, + "step": 23830 + }, + { + "epoch": 0.9833704712387554, + "grad_norm": 4.647800913923695, + "learning_rate": 2.1752473241014592e-09, + "loss": 0.5259, + "step": 23831 + }, + { + "epoch": 0.9834117355781133, + "grad_norm": 3.96163203881729, + "learning_rate": 2.164468039413281e-09, + "loss": 0.525, + "step": 23832 + }, + { + "epoch": 0.9834529999174714, + "grad_norm": 3.7057664510639183, + "learning_rate": 2.1537155098147466e-09, + "loss": 0.5041, + "step": 23833 + }, + { + "epoch": 0.9834942642568293, + "grad_norm": 4.32083386310783, + "learning_rate": 2.1429897354980355e-09, + "loss": 0.5517, + "step": 23834 + }, + { + "epoch": 0.9835355285961872, + "grad_norm": 4.194978413956682, + "learning_rate": 2.1322907166544947e-09, + "loss": 0.4725, + "step": 23835 + }, + { + "epoch": 0.9835767929355451, + "grad_norm": 2.061593775405015, + "learning_rate": 2.1216184534753045e-09, + "loss": 0.52, + "step": 23836 + }, + { + "epoch": 0.983618057274903, + "grad_norm": 5.764800257906505, + "learning_rate": 2.1109729461511463e-09, + "loss": 0.4833, + "step": 23837 + }, + { + "epoch": 0.983659321614261, + "grad_norm": 3.3104032232127665, + "learning_rate": 2.1003541948720338e-09, + "loss": 0.5118, + "step": 23838 + }, + { + "epoch": 0.9837005859536189, + "grad_norm": 3.229132979387601, + "learning_rate": 2.0897621998279827e-09, + "loss": 0.5213, + "step": 23839 + }, + { + "epoch": 0.9837418502929768, + "grad_norm": 2.779562435810574, + "learning_rate": 2.079196961207841e-09, + "loss": 0.5012, + "step": 23840 + }, + { + "epoch": 0.9837831146323347, + "grad_norm": 2.419203729789076, + "learning_rate": 2.068658479200458e-09, + "loss": 0.4786, + "step": 23841 + }, + { + "epoch": 0.9838243789716926, + "grad_norm": 2.657877578536748, + "learning_rate": 2.0581467539941832e-09, + "loss": 0.4906, + "step": 23842 + }, + { + "epoch": 0.9838656433110506, + "grad_norm": 6.165232771139329, + "learning_rate": 2.0476617857766998e-09, + "loss": 0.5101, + "step": 23843 + }, + { + "epoch": 0.9839069076504086, + "grad_norm": 2.7978118125964864, + "learning_rate": 2.0372035747351912e-09, + "loss": 0.4988, + "step": 23844 + }, + { + "epoch": 0.9839481719897665, + "grad_norm": 2.712153267161481, + "learning_rate": 2.026772121056508e-09, + "loss": 0.491, + "step": 23845 + }, + { + "epoch": 
0.9839894363291244, + "grad_norm": 2.391926835460338, + "learning_rate": 2.016367424927168e-09, + "loss": 0.4948, + "step": 23846 + }, + { + "epoch": 0.9840307006684823, + "grad_norm": 5.468747837925359, + "learning_rate": 2.0059894865328555e-09, + "loss": 0.5049, + "step": 23847 + }, + { + "epoch": 0.9840719650078402, + "grad_norm": 11.56643247482547, + "learning_rate": 1.9956383060589224e-09, + "loss": 0.4678, + "step": 23848 + }, + { + "epoch": 0.9841132293471981, + "grad_norm": 4.4554639064247405, + "learning_rate": 1.9853138836902206e-09, + "loss": 0.4867, + "step": 23849 + }, + { + "epoch": 0.984154493686556, + "grad_norm": 2.7271182572802575, + "learning_rate": 1.9750162196114362e-09, + "loss": 0.5176, + "step": 23850 + }, + { + "epoch": 0.984195758025914, + "grad_norm": 4.272950417628695, + "learning_rate": 1.9647453140062556e-09, + "loss": 0.5331, + "step": 23851 + }, + { + "epoch": 0.9842370223652719, + "grad_norm": 3.885009720039459, + "learning_rate": 1.9545011670583644e-09, + "loss": 0.5392, + "step": 23852 + }, + { + "epoch": 0.9842782867046299, + "grad_norm": 2.471521876148587, + "learning_rate": 1.944283778950451e-09, + "loss": 0.481, + "step": 23853 + }, + { + "epoch": 0.9843195510439878, + "grad_norm": 2.2993022485041807, + "learning_rate": 1.9340931498652015e-09, + "loss": 0.5472, + "step": 23854 + }, + { + "epoch": 0.9843608153833457, + "grad_norm": 7.440187071203931, + "learning_rate": 1.923929279984804e-09, + "loss": 0.4871, + "step": 23855 + }, + { + "epoch": 0.9844020797227037, + "grad_norm": 3.9658276204270084, + "learning_rate": 1.913792169490447e-09, + "loss": 0.4765, + "step": 23856 + }, + { + "epoch": 0.9844433440620616, + "grad_norm": 17.430280561604143, + "learning_rate": 1.903681818563485e-09, + "loss": 0.5433, + "step": 23857 + }, + { + "epoch": 0.9844846084014195, + "grad_norm": 6.093988527575311, + "learning_rate": 1.893598227384441e-09, + "loss": 0.4862, + "step": 23858 + }, + { + "epoch": 0.9845258727407774, + "grad_norm": 3.6364674135536688, + "learning_rate": 1.8835413961335034e-09, + "loss": 0.5379, + "step": 23859 + }, + { + "epoch": 0.9845671370801353, + "grad_norm": 5.924044574171636, + "learning_rate": 1.873511324990196e-09, + "loss": 0.5282, + "step": 23860 + }, + { + "epoch": 0.9846084014194932, + "grad_norm": 9.362998760473529, + "learning_rate": 1.8635080141335414e-09, + "loss": 0.514, + "step": 23861 + }, + { + "epoch": 0.9846496657588512, + "grad_norm": 2.5158643392825413, + "learning_rate": 1.8535314637425638e-09, + "loss": 0.5054, + "step": 23862 + }, + { + "epoch": 0.9846909300982092, + "grad_norm": 2.7656952975957356, + "learning_rate": 1.843581673995287e-09, + "loss": 0.5159, + "step": 23863 + }, + { + "epoch": 0.9847321944375671, + "grad_norm": 8.057585856690192, + "learning_rate": 1.8336586450694026e-09, + "loss": 0.4792, + "step": 23864 + }, + { + "epoch": 0.984773458776925, + "grad_norm": 2.6054135730196073, + "learning_rate": 1.8237623771422684e-09, + "loss": 0.4627, + "step": 23865 + }, + { + "epoch": 0.9848147231162829, + "grad_norm": 5.438706808707721, + "learning_rate": 1.8138928703905767e-09, + "loss": 0.513, + "step": 23866 + }, + { + "epoch": 0.9848559874556408, + "grad_norm": 3.525939919619814, + "learning_rate": 1.804050124990686e-09, + "loss": 0.5464, + "step": 23867 + }, + { + "epoch": 0.9848972517949988, + "grad_norm": 3.502718396475279, + "learning_rate": 1.7942341411182894e-09, + "loss": 0.4659, + "step": 23868 + }, + { + "epoch": 0.9849385161343567, + "grad_norm": 4.454801957477239, + "learning_rate": 
1.7844449189487466e-09, + "loss": 0.5707, + "step": 23869 + }, + { + "epoch": 0.9849797804737146, + "grad_norm": 2.3620562943800594, + "learning_rate": 1.7746824586570842e-09, + "loss": 0.583, + "step": 23870 + }, + { + "epoch": 0.9850210448130725, + "grad_norm": 4.2635526033987095, + "learning_rate": 1.7649467604176628e-09, + "loss": 0.531, + "step": 23871 + }, + { + "epoch": 0.9850623091524304, + "grad_norm": 3.1674471277059415, + "learning_rate": 1.7552378244041766e-09, + "loss": 0.5164, + "step": 23872 + }, + { + "epoch": 0.9851035734917883, + "grad_norm": 5.024091782697196, + "learning_rate": 1.7455556507901537e-09, + "loss": 0.5255, + "step": 23873 + }, + { + "epoch": 0.9851448378311464, + "grad_norm": 2.8458152236095016, + "learning_rate": 1.7359002397486223e-09, + "loss": 0.5049, + "step": 23874 + }, + { + "epoch": 0.9851861021705043, + "grad_norm": 3.2860957292926605, + "learning_rate": 1.7262715914519445e-09, + "loss": 0.5151, + "step": 23875 + }, + { + "epoch": 0.9852273665098622, + "grad_norm": 2.4527087367743547, + "learning_rate": 1.7166697060723159e-09, + "loss": 0.5081, + "step": 23876 + }, + { + "epoch": 0.9852686308492201, + "grad_norm": 3.1219949401477516, + "learning_rate": 1.7070945837809326e-09, + "loss": 0.4926, + "step": 23877 + }, + { + "epoch": 0.985309895188578, + "grad_norm": 6.153519253914246, + "learning_rate": 1.6975462247489915e-09, + "loss": 0.4643, + "step": 23878 + }, + { + "epoch": 0.985351159527936, + "grad_norm": 3.375728487845865, + "learning_rate": 1.688024629147189e-09, + "loss": 0.4899, + "step": 23879 + }, + { + "epoch": 0.9853924238672939, + "grad_norm": 2.3458442827791, + "learning_rate": 1.6785297971453895e-09, + "loss": 0.4436, + "step": 23880 + }, + { + "epoch": 0.9854336882066518, + "grad_norm": 3.3508417742357337, + "learning_rate": 1.6690617289132903e-09, + "loss": 0.5059, + "step": 23881 + }, + { + "epoch": 0.9854749525460097, + "grad_norm": 1.9981852421957387, + "learning_rate": 1.6596204246199232e-09, + "loss": 0.4938, + "step": 23882 + }, + { + "epoch": 0.9855162168853676, + "grad_norm": 4.137025538225878, + "learning_rate": 1.6502058844341528e-09, + "loss": 0.5218, + "step": 23883 + }, + { + "epoch": 0.9855574812247256, + "grad_norm": 2.8231202970306777, + "learning_rate": 1.6408181085238449e-09, + "loss": 0.4808, + "step": 23884 + }, + { + "epoch": 0.9855987455640836, + "grad_norm": 5.515164078943846, + "learning_rate": 1.631457097057032e-09, + "loss": 0.5228, + "step": 23885 + }, + { + "epoch": 0.9856400099034415, + "grad_norm": 4.784330759047279, + "learning_rate": 1.6221228502005802e-09, + "loss": 0.513, + "step": 23886 + }, + { + "epoch": 0.9856812742427994, + "grad_norm": 2.4043493710534647, + "learning_rate": 1.612815368121523e-09, + "loss": 0.551, + "step": 23887 + }, + { + "epoch": 0.9857225385821573, + "grad_norm": 3.295690187532173, + "learning_rate": 1.603534650985894e-09, + "loss": 0.5018, + "step": 23888 + }, + { + "epoch": 0.9857638029215152, + "grad_norm": 2.293636517093353, + "learning_rate": 1.5942806989595605e-09, + "loss": 0.4594, + "step": 23889 + }, + { + "epoch": 0.9858050672608731, + "grad_norm": 12.790458962836276, + "learning_rate": 1.5850535122077237e-09, + "loss": 0.4509, + "step": 23890 + }, + { + "epoch": 0.985846331600231, + "grad_norm": 6.853885262169517, + "learning_rate": 1.5758530908952518e-09, + "loss": 0.5194, + "step": 23891 + }, + { + "epoch": 0.985887595939589, + "grad_norm": 3.1026197149186028, + "learning_rate": 1.5666794351866799e-09, + "loss": 0.5023, + "step": 23892 + }, + { + "epoch": 
0.9859288602789469, + "grad_norm": 7.629076116647316, + "learning_rate": 1.55753254524571e-09, + "loss": 0.4726, + "step": 23893 + }, + { + "epoch": 0.9859701246183049, + "grad_norm": 4.096687475612417, + "learning_rate": 1.548412421235712e-09, + "loss": 0.4799, + "step": 23894 + }, + { + "epoch": 0.9860113889576628, + "grad_norm": 2.8628301364118647, + "learning_rate": 1.5393190633195554e-09, + "loss": 0.5798, + "step": 23895 + }, + { + "epoch": 0.9860526532970207, + "grad_norm": 6.578016343434275, + "learning_rate": 1.5302524716597766e-09, + "loss": 0.4802, + "step": 23896 + }, + { + "epoch": 0.9860939176363787, + "grad_norm": 2.384593866853213, + "learning_rate": 1.5212126464184128e-09, + "loss": 0.5196, + "step": 23897 + }, + { + "epoch": 0.9861351819757366, + "grad_norm": 2.5087616765822522, + "learning_rate": 1.5121995877566686e-09, + "loss": 0.4805, + "step": 23898 + }, + { + "epoch": 0.9861764463150945, + "grad_norm": 2.619808935145023, + "learning_rate": 1.503213295835748e-09, + "loss": 0.5836, + "step": 23899 + }, + { + "epoch": 0.9862177106544524, + "grad_norm": 2.4896543842605507, + "learning_rate": 1.4942537708160231e-09, + "loss": 0.4431, + "step": 23900 + }, + { + "epoch": 0.9862589749938103, + "grad_norm": 5.211802363467954, + "learning_rate": 1.4853210128576989e-09, + "loss": 0.5088, + "step": 23901 + }, + { + "epoch": 0.9863002393331682, + "grad_norm": 3.225522965886584, + "learning_rate": 1.4764150221201478e-09, + "loss": 0.5224, + "step": 23902 + }, + { + "epoch": 0.9863415036725262, + "grad_norm": 2.8309372126541645, + "learning_rate": 1.4675357987625758e-09, + "loss": 0.514, + "step": 23903 + }, + { + "epoch": 0.9863827680118842, + "grad_norm": 4.596580029950883, + "learning_rate": 1.4586833429436897e-09, + "loss": 0.4744, + "step": 23904 + }, + { + "epoch": 0.9864240323512421, + "grad_norm": 3.2944629369846106, + "learning_rate": 1.4498576548213627e-09, + "loss": 0.4705, + "step": 23905 + }, + { + "epoch": 0.9864652966906, + "grad_norm": 4.16400547090445, + "learning_rate": 1.4410587345534687e-09, + "loss": 0.5015, + "step": 23906 + }, + { + "epoch": 0.9865065610299579, + "grad_norm": 4.782101728952456, + "learning_rate": 1.4322865822970488e-09, + "loss": 0.4729, + "step": 23907 + }, + { + "epoch": 0.9865478253693158, + "grad_norm": 4.396527321455705, + "learning_rate": 1.423541198208811e-09, + "loss": 0.5068, + "step": 23908 + }, + { + "epoch": 0.9865890897086738, + "grad_norm": 3.6172417404178434, + "learning_rate": 1.4148225824449635e-09, + "loss": 0.4987, + "step": 23909 + }, + { + "epoch": 0.9866303540480317, + "grad_norm": 2.348498507209684, + "learning_rate": 1.4061307351613817e-09, + "loss": 0.5326, + "step": 23910 + }, + { + "epoch": 0.9866716183873896, + "grad_norm": 10.11468107426374, + "learning_rate": 1.3974656565131083e-09, + "loss": 0.5101, + "step": 23911 + }, + { + "epoch": 0.9867128827267475, + "grad_norm": 4.152739782743325, + "learning_rate": 1.3888273466550194e-09, + "loss": 0.5005, + "step": 23912 + }, + { + "epoch": 0.9867541470661054, + "grad_norm": 3.71805328742982, + "learning_rate": 1.3802158057414916e-09, + "loss": 0.5386, + "step": 23913 + }, + { + "epoch": 0.9867954114054635, + "grad_norm": 2.1397422583454895, + "learning_rate": 1.371631033926235e-09, + "loss": 0.523, + "step": 23914 + }, + { + "epoch": 0.9868366757448214, + "grad_norm": 2.3157695396944873, + "learning_rate": 1.3630730313626272e-09, + "loss": 0.4816, + "step": 23915 + }, + { + "epoch": 0.9868779400841793, + "grad_norm": 3.926105693331044, + "learning_rate": 
1.3545417982037123e-09, + "loss": 0.5205, + "step": 23916 + }, + { + "epoch": 0.9869192044235372, + "grad_norm": 3.6497439554249653, + "learning_rate": 1.3460373346015354e-09, + "loss": 0.5027, + "step": 23917 + }, + { + "epoch": 0.9869604687628951, + "grad_norm": 2.474315544856711, + "learning_rate": 1.3375596407083079e-09, + "loss": 0.4647, + "step": 23918 + }, + { + "epoch": 0.987001733102253, + "grad_norm": 2.292142198643337, + "learning_rate": 1.3291087166752425e-09, + "loss": 0.5445, + "step": 23919 + }, + { + "epoch": 0.987042997441611, + "grad_norm": 2.605820797731152, + "learning_rate": 1.3206845626533848e-09, + "loss": 0.526, + "step": 23920 + }, + { + "epoch": 0.9870842617809689, + "grad_norm": 4.6098813353471115, + "learning_rate": 1.312287178793281e-09, + "loss": 0.5212, + "step": 23921 + }, + { + "epoch": 0.9871255261203268, + "grad_norm": 3.4379797316246714, + "learning_rate": 1.303916565244978e-09, + "loss": 0.534, + "step": 23922 + }, + { + "epoch": 0.9871667904596847, + "grad_norm": 2.8351468514732376, + "learning_rate": 1.2955727221576897e-09, + "loss": 0.4967, + "step": 23923 + }, + { + "epoch": 0.9872080547990427, + "grad_norm": 2.6269231957626684, + "learning_rate": 1.2872556496807964e-09, + "loss": 0.5149, + "step": 23924 + }, + { + "epoch": 0.9872493191384006, + "grad_norm": 9.403061131780108, + "learning_rate": 1.2789653479626795e-09, + "loss": 0.5397, + "step": 23925 + }, + { + "epoch": 0.9872905834777586, + "grad_norm": 2.299504041865738, + "learning_rate": 1.2707018171513873e-09, + "loss": 0.5369, + "step": 23926 + }, + { + "epoch": 0.9873318478171165, + "grad_norm": 3.1723628873197036, + "learning_rate": 1.262465057394635e-09, + "loss": 0.5342, + "step": 23927 + }, + { + "epoch": 0.9873731121564744, + "grad_norm": 5.034194696187677, + "learning_rate": 1.2542550688396382e-09, + "loss": 0.5185, + "step": 23928 + }, + { + "epoch": 0.9874143764958323, + "grad_norm": 6.377625633674503, + "learning_rate": 1.2460718516329461e-09, + "loss": 0.5201, + "step": 23929 + }, + { + "epoch": 0.9874556408351902, + "grad_norm": 2.4975706167674563, + "learning_rate": 1.237915405920609e-09, + "loss": 0.4965, + "step": 23930 + }, + { + "epoch": 0.9874969051745481, + "grad_norm": 2.9170127563881647, + "learning_rate": 1.2297857318483429e-09, + "loss": 0.4966, + "step": 23931 + }, + { + "epoch": 0.987538169513906, + "grad_norm": 4.730095687131683, + "learning_rate": 1.2216828295615323e-09, + "loss": 0.5327, + "step": 23932 + }, + { + "epoch": 0.987579433853264, + "grad_norm": 3.531220826830466, + "learning_rate": 1.2136066992048944e-09, + "loss": 0.4541, + "step": 23933 + }, + { + "epoch": 0.9876206981926219, + "grad_norm": 2.97975616442234, + "learning_rate": 1.2055573409226472e-09, + "loss": 0.5078, + "step": 23934 + }, + { + "epoch": 0.9876619625319799, + "grad_norm": 5.877221116181762, + "learning_rate": 1.1975347548585092e-09, + "loss": 0.5445, + "step": 23935 + }, + { + "epoch": 0.9877032268713378, + "grad_norm": 3.1237255040045673, + "learning_rate": 1.1895389411558654e-09, + "loss": 0.5215, + "step": 23936 + }, + { + "epoch": 0.9877444912106957, + "grad_norm": 4.6332890970153, + "learning_rate": 1.1815698999574354e-09, + "loss": 0.5241, + "step": 23937 + }, + { + "epoch": 0.9877857555500537, + "grad_norm": 3.3457284630970254, + "learning_rate": 1.1736276314057715e-09, + "loss": 0.5118, + "step": 23938 + }, + { + "epoch": 0.9878270198894116, + "grad_norm": 2.226987822170534, + "learning_rate": 1.1657121356424271e-09, + "loss": 0.4548, + "step": 23939 + }, + { + "epoch": 
0.9878682842287695, + "grad_norm": 2.5494152211966705, + "learning_rate": 1.1578234128091225e-09, + "loss": 0.4938, + "step": 23940 + }, + { + "epoch": 0.9879095485681274, + "grad_norm": 2.9439171093413368, + "learning_rate": 1.1499614630465783e-09, + "loss": 0.4713, + "step": 23941 + }, + { + "epoch": 0.9879508129074853, + "grad_norm": 3.1560566664219687, + "learning_rate": 1.142126286495182e-09, + "loss": 0.4853, + "step": 23942 + }, + { + "epoch": 0.9879920772468432, + "grad_norm": 3.9646023795574625, + "learning_rate": 1.1343178832949885e-09, + "loss": 0.5261, + "step": 23943 + }, + { + "epoch": 0.9880333415862012, + "grad_norm": 5.392956325849507, + "learning_rate": 1.1265362535855529e-09, + "loss": 0.554, + "step": 23944 + }, + { + "epoch": 0.9880746059255592, + "grad_norm": 3.925025893550183, + "learning_rate": 1.1187813975057636e-09, + "loss": 0.4507, + "step": 23945 + }, + { + "epoch": 0.9881158702649171, + "grad_norm": 2.397432097049971, + "learning_rate": 1.1110533151941771e-09, + "loss": 0.4997, + "step": 23946 + }, + { + "epoch": 0.988157134604275, + "grad_norm": 3.7458879416558157, + "learning_rate": 1.1033520067888493e-09, + "loss": 0.5373, + "step": 23947 + }, + { + "epoch": 0.9881983989436329, + "grad_norm": 2.7906910840139507, + "learning_rate": 1.09567747242717e-09, + "loss": 0.5116, + "step": 23948 + }, + { + "epoch": 0.9882396632829908, + "grad_norm": 8.748015471522583, + "learning_rate": 1.0880297122465299e-09, + "loss": 0.5203, + "step": 23949 + }, + { + "epoch": 0.9882809276223488, + "grad_norm": 2.9713051997003195, + "learning_rate": 1.0804087263833196e-09, + "loss": 0.4846, + "step": 23950 + }, + { + "epoch": 0.9883221919617067, + "grad_norm": 3.9805785103549614, + "learning_rate": 1.0728145149737634e-09, + "loss": 0.5522, + "step": 23951 + }, + { + "epoch": 0.9883634563010646, + "grad_norm": 4.313331061613628, + "learning_rate": 1.0652470781534196e-09, + "loss": 0.521, + "step": 23952 + }, + { + "epoch": 0.9884047206404225, + "grad_norm": 3.02153028803642, + "learning_rate": 1.05770641605768e-09, + "loss": 0.5415, + "step": 23953 + }, + { + "epoch": 0.9884459849797804, + "grad_norm": 15.53864251411325, + "learning_rate": 1.050192528820937e-09, + "loss": 0.5102, + "step": 23954 + }, + { + "epoch": 0.9884872493191385, + "grad_norm": 2.501905602381731, + "learning_rate": 1.042705416577583e-09, + "loss": 0.5553, + "step": 23955 + }, + { + "epoch": 0.9885285136584964, + "grad_norm": 3.818696004205952, + "learning_rate": 1.0352450794613445e-09, + "loss": 0.4664, + "step": 23956 + }, + { + "epoch": 0.9885697779978543, + "grad_norm": 2.096223164289898, + "learning_rate": 1.0278115176054481e-09, + "loss": 0.4729, + "step": 23957 + }, + { + "epoch": 0.9886110423372122, + "grad_norm": 4.6026418980548796, + "learning_rate": 1.0204047311426213e-09, + "loss": 0.4992, + "step": 23958 + }, + { + "epoch": 0.9886523066765701, + "grad_norm": 4.863265554029495, + "learning_rate": 1.0130247202052579e-09, + "loss": 0.5263, + "step": 23959 + }, + { + "epoch": 0.988693571015928, + "grad_norm": 3.0440078340513073, + "learning_rate": 1.0056714849252524e-09, + "loss": 0.5526, + "step": 23960 + }, + { + "epoch": 0.988734835355286, + "grad_norm": 3.732573532940451, + "learning_rate": 9.983450254338333e-10, + "loss": 0.555, + "step": 23961 + }, + { + "epoch": 0.9887760996946439, + "grad_norm": 2.778870129103927, + "learning_rate": 9.910453418618958e-10, + "loss": 0.5342, + "step": 23962 + }, + { + "epoch": 0.9888173640340018, + "grad_norm": 2.203598339040431, + "learning_rate": 
9.837724343398358e-10, + "loss": 0.4946, + "step": 23963 + }, + { + "epoch": 0.9888586283733597, + "grad_norm": 4.268397468347196, + "learning_rate": 9.76526302997549e-10, + "loss": 0.5109, + "step": 23964 + }, + { + "epoch": 0.9888998927127177, + "grad_norm": 2.7460366174256725, + "learning_rate": 9.693069479645987e-10, + "loss": 0.5018, + "step": 23965 + }, + { + "epoch": 0.9889411570520756, + "grad_norm": 4.6087499336677125, + "learning_rate": 9.621143693697154e-10, + "loss": 0.4901, + "step": 23966 + }, + { + "epoch": 0.9889824213914336, + "grad_norm": 2.9609469724267483, + "learning_rate": 9.549485673414625e-10, + "loss": 0.504, + "step": 23967 + }, + { + "epoch": 0.9890236857307915, + "grad_norm": 3.804167276563241, + "learning_rate": 9.478095420079047e-10, + "loss": 0.5162, + "step": 23968 + }, + { + "epoch": 0.9890649500701494, + "grad_norm": 7.097646351669311, + "learning_rate": 9.406972934964397e-10, + "loss": 0.4907, + "step": 23969 + }, + { + "epoch": 0.9891062144095073, + "grad_norm": 2.6668294581530487, + "learning_rate": 9.336118219342993e-10, + "loss": 0.477, + "step": 23970 + }, + { + "epoch": 0.9891474787488652, + "grad_norm": 2.2395310655131855, + "learning_rate": 9.265531274478822e-10, + "loss": 0.5229, + "step": 23971 + }, + { + "epoch": 0.9891887430882231, + "grad_norm": 3.907092779024588, + "learning_rate": 9.19521210163421e-10, + "loss": 0.4848, + "step": 23972 + }, + { + "epoch": 0.989230007427581, + "grad_norm": 26.195971952344465, + "learning_rate": 9.125160702063151e-10, + "loss": 0.5524, + "step": 23973 + }, + { + "epoch": 0.989271271766939, + "grad_norm": 6.505419526924996, + "learning_rate": 9.055377077019645e-10, + "loss": 0.5328, + "step": 23974 + }, + { + "epoch": 0.989312536106297, + "grad_norm": 2.6701698181456806, + "learning_rate": 8.985861227747694e-10, + "loss": 0.6235, + "step": 23975 + }, + { + "epoch": 0.9893538004456549, + "grad_norm": 5.4452382549292855, + "learning_rate": 8.916613155489639e-10, + "loss": 0.5096, + "step": 23976 + }, + { + "epoch": 0.9893950647850128, + "grad_norm": 2.112479199595221, + "learning_rate": 8.847632861484489e-10, + "loss": 0.5066, + "step": 23977 + }, + { + "epoch": 0.9894363291243707, + "grad_norm": 17.7358272905119, + "learning_rate": 8.778920346961261e-10, + "loss": 0.5151, + "step": 23978 + }, + { + "epoch": 0.9894775934637287, + "grad_norm": 4.947240280327321, + "learning_rate": 8.710475613148972e-10, + "loss": 0.5057, + "step": 23979 + }, + { + "epoch": 0.9895188578030866, + "grad_norm": 2.2418279318845666, + "learning_rate": 8.642298661269976e-10, + "loss": 0.4744, + "step": 23980 + }, + { + "epoch": 0.9895601221424445, + "grad_norm": 3.7030910157048753, + "learning_rate": 8.574389492543299e-10, + "loss": 0.5115, + "step": 23981 + }, + { + "epoch": 0.9896013864818024, + "grad_norm": 2.336859293212502, + "learning_rate": 8.50674810817964e-10, + "loss": 0.4574, + "step": 23982 + }, + { + "epoch": 0.9896426508211603, + "grad_norm": 2.6961745975726097, + "learning_rate": 8.439374509388032e-10, + "loss": 0.5865, + "step": 23983 + }, + { + "epoch": 0.9896839151605182, + "grad_norm": 5.761245270208058, + "learning_rate": 8.372268697374174e-10, + "loss": 0.527, + "step": 23984 + }, + { + "epoch": 0.9897251794998763, + "grad_norm": 3.6134099795316073, + "learning_rate": 8.305430673333781e-10, + "loss": 0.5264, + "step": 23985 + }, + { + "epoch": 0.9897664438392342, + "grad_norm": 3.5152487798259635, + "learning_rate": 8.238860438460893e-10, + "loss": 0.5089, + "step": 23986 + }, + { + "epoch": 0.9898077081785921, + 
"grad_norm": 6.054273584683438, + "learning_rate": 8.172557993946228e-10, + "loss": 0.5084, + "step": 23987 + }, + { + "epoch": 0.98984897251795, + "grad_norm": 3.0362366528475255, + "learning_rate": 8.106523340973837e-10, + "loss": 0.5878, + "step": 23988 + }, + { + "epoch": 0.9898902368573079, + "grad_norm": 4.4826372698944486, + "learning_rate": 8.040756480722777e-10, + "loss": 0.5574, + "step": 23989 + }, + { + "epoch": 0.9899315011966658, + "grad_norm": 3.21935914596062, + "learning_rate": 7.975257414368775e-10, + "loss": 0.5446, + "step": 23990 + }, + { + "epoch": 0.9899727655360238, + "grad_norm": 6.634673595253123, + "learning_rate": 7.910026143080895e-10, + "loss": 0.5726, + "step": 23991 + }, + { + "epoch": 0.9900140298753817, + "grad_norm": 8.039271229527618, + "learning_rate": 7.845062668023206e-10, + "loss": 0.5515, + "step": 23992 + }, + { + "epoch": 0.9900552942147396, + "grad_norm": 2.7151114360548867, + "learning_rate": 7.780366990358112e-10, + "loss": 0.4683, + "step": 23993 + }, + { + "epoch": 0.9900965585540975, + "grad_norm": 2.248195961777245, + "learning_rate": 7.715939111239689e-10, + "loss": 0.4758, + "step": 23994 + }, + { + "epoch": 0.9901378228934554, + "grad_norm": 2.993569880954252, + "learning_rate": 7.65177903182035e-10, + "loss": 0.4573, + "step": 23995 + }, + { + "epoch": 0.9901790872328134, + "grad_norm": 2.796085288387839, + "learning_rate": 7.587886753245842e-10, + "loss": 0.5161, + "step": 23996 + }, + { + "epoch": 0.9902203515721714, + "grad_norm": 2.866606107775519, + "learning_rate": 7.524262276655258e-10, + "loss": 0.523, + "step": 23997 + }, + { + "epoch": 0.9902616159115293, + "grad_norm": 3.586995494407008, + "learning_rate": 7.460905603187684e-10, + "loss": 0.5762, + "step": 23998 + }, + { + "epoch": 0.9903028802508872, + "grad_norm": 5.0748408088612, + "learning_rate": 7.397816733973884e-10, + "loss": 0.5207, + "step": 23999 + }, + { + "epoch": 0.9903441445902451, + "grad_norm": 8.944159214394606, + "learning_rate": 7.334995670141287e-10, + "loss": 0.5151, + "step": 24000 + }, + { + "epoch": 0.990385408929603, + "grad_norm": 3.9682126642090076, + "learning_rate": 7.272442412810664e-10, + "loss": 0.4787, + "step": 24001 + }, + { + "epoch": 0.9904266732689609, + "grad_norm": 2.7684911840472286, + "learning_rate": 7.210156963101122e-10, + "loss": 0.5435, + "step": 24002 + }, + { + "epoch": 0.9904679376083189, + "grad_norm": 3.0701355419638356, + "learning_rate": 7.148139322125102e-10, + "loss": 0.5176, + "step": 24003 + }, + { + "epoch": 0.9905092019476768, + "grad_norm": 3.0858461067802794, + "learning_rate": 7.086389490988388e-10, + "loss": 0.5142, + "step": 24004 + }, + { + "epoch": 0.9905504662870347, + "grad_norm": 2.1507384942057426, + "learning_rate": 7.024907470795094e-10, + "loss": 0.5006, + "step": 24005 + }, + { + "epoch": 0.9905917306263927, + "grad_norm": 3.178454600780207, + "learning_rate": 6.963693262644344e-10, + "loss": 0.5227, + "step": 24006 + }, + { + "epoch": 0.9906329949657506, + "grad_norm": 2.9789765631715763, + "learning_rate": 6.902746867628595e-10, + "loss": 0.5087, + "step": 24007 + }, + { + "epoch": 0.9906742593051086, + "grad_norm": 2.7127261831673524, + "learning_rate": 6.842068286836978e-10, + "loss": 0.5354, + "step": 24008 + }, + { + "epoch": 0.9907155236444665, + "grad_norm": 5.286056318713394, + "learning_rate": 6.781657521351959e-10, + "loss": 0.5292, + "step": 24009 + }, + { + "epoch": 0.9907567879838244, + "grad_norm": 3.8712371143752367, + "learning_rate": 6.721514572256004e-10, + "loss": 0.5689, + "step": 
24010 + }, + { + "epoch": 0.9907980523231823, + "grad_norm": 3.326611868355331, + "learning_rate": 6.661639440619927e-10, + "loss": 0.5403, + "step": 24011 + }, + { + "epoch": 0.9908393166625402, + "grad_norm": 3.7959742929928635, + "learning_rate": 6.602032127514534e-10, + "loss": 0.4561, + "step": 24012 + }, + { + "epoch": 0.9908805810018981, + "grad_norm": 2.402247307495195, + "learning_rate": 6.542692634003977e-10, + "loss": 0.5015, + "step": 24013 + }, + { + "epoch": 0.990921845341256, + "grad_norm": 1.9950270131391408, + "learning_rate": 6.483620961150738e-10, + "loss": 0.5069, + "step": 24014 + }, + { + "epoch": 0.990963109680614, + "grad_norm": 2.879634007381052, + "learning_rate": 6.424817110005643e-10, + "loss": 0.5679, + "step": 24015 + }, + { + "epoch": 0.991004374019972, + "grad_norm": 3.5372671616612226, + "learning_rate": 6.366281081622848e-10, + "loss": 0.5215, + "step": 24016 + }, + { + "epoch": 0.9910456383593299, + "grad_norm": 5.525695998235723, + "learning_rate": 6.308012877046521e-10, + "loss": 0.5726, + "step": 24017 + }, + { + "epoch": 0.9910869026986878, + "grad_norm": 2.5511528396048577, + "learning_rate": 6.250012497317493e-10, + "loss": 0.5025, + "step": 24018 + }, + { + "epoch": 0.9911281670380457, + "grad_norm": 2.5589186490912383, + "learning_rate": 6.192279943473267e-10, + "loss": 0.5294, + "step": 24019 + }, + { + "epoch": 0.9911694313774037, + "grad_norm": 2.613731580771947, + "learning_rate": 6.134815216541356e-10, + "loss": 0.4529, + "step": 24020 + }, + { + "epoch": 0.9912106957167616, + "grad_norm": 5.21727847881547, + "learning_rate": 6.077618317552603e-10, + "loss": 0.5145, + "step": 24021 + }, + { + "epoch": 0.9912519600561195, + "grad_norm": 2.833191140610415, + "learning_rate": 6.020689247524525e-10, + "loss": 0.5249, + "step": 24022 + }, + { + "epoch": 0.9912932243954774, + "grad_norm": 2.832551242021715, + "learning_rate": 5.964028007477973e-10, + "loss": 0.5102, + "step": 24023 + }, + { + "epoch": 0.9913344887348353, + "grad_norm": 3.348578397272141, + "learning_rate": 5.90763459842214e-10, + "loss": 0.4769, + "step": 24024 + }, + { + "epoch": 0.9913757530741932, + "grad_norm": 4.189893002913307, + "learning_rate": 5.85150902136622e-10, + "loss": 0.5355, + "step": 24025 + }, + { + "epoch": 0.9914170174135513, + "grad_norm": 2.8339164417173017, + "learning_rate": 5.795651277311076e-10, + "loss": 0.4835, + "step": 24026 + }, + { + "epoch": 0.9914582817529092, + "grad_norm": 3.6364615466063146, + "learning_rate": 5.740061367255911e-10, + "loss": 0.4893, + "step": 24027 + }, + { + "epoch": 0.9914995460922671, + "grad_norm": 2.9417228451047595, + "learning_rate": 5.684739292191598e-10, + "loss": 0.4794, + "step": 24028 + }, + { + "epoch": 0.991540810431625, + "grad_norm": 14.684691905308464, + "learning_rate": 5.629685053110678e-10, + "loss": 0.4805, + "step": 24029 + }, + { + "epoch": 0.9915820747709829, + "grad_norm": 2.554588344610269, + "learning_rate": 5.574898650990701e-10, + "loss": 0.4906, + "step": 24030 + }, + { + "epoch": 0.9916233391103408, + "grad_norm": 2.809267079629837, + "learning_rate": 5.52038008681588e-10, + "loss": 0.5359, + "step": 24031 + }, + { + "epoch": 0.9916646034496988, + "grad_norm": 10.415601809685619, + "learning_rate": 5.466129361557104e-10, + "loss": 0.4493, + "step": 24032 + }, + { + "epoch": 0.9917058677890567, + "grad_norm": 2.405684378736842, + "learning_rate": 5.412146476181934e-10, + "loss": 0.5014, + "step": 24033 + }, + { + "epoch": 0.9917471321284146, + "grad_norm": 6.296745779523186, + "learning_rate": 
5.358431431657929e-10, + "loss": 0.5624, + "step": 24034 + }, + { + "epoch": 0.9917883964677725, + "grad_norm": 6.806053380303337, + "learning_rate": 5.304984228944321e-10, + "loss": 0.4911, + "step": 24035 + }, + { + "epoch": 0.9918296608071305, + "grad_norm": 3.6136661774438643, + "learning_rate": 5.251804868993681e-10, + "loss": 0.5364, + "step": 24036 + }, + { + "epoch": 0.9918709251464884, + "grad_norm": 2.7411265287809576, + "learning_rate": 5.198893352756917e-10, + "loss": 0.5044, + "step": 24037 + }, + { + "epoch": 0.9919121894858464, + "grad_norm": 2.7396379451062365, + "learning_rate": 5.146249681178272e-10, + "loss": 0.5181, + "step": 24038 + }, + { + "epoch": 0.9919534538252043, + "grad_norm": 2.7125081418459946, + "learning_rate": 5.093873855200326e-10, + "loss": 0.4891, + "step": 24039 + }, + { + "epoch": 0.9919947181645622, + "grad_norm": 2.9318398075442342, + "learning_rate": 5.041765875755666e-10, + "loss": 0.5864, + "step": 24040 + }, + { + "epoch": 0.9920359825039201, + "grad_norm": 3.4310495865568007, + "learning_rate": 4.989925743778546e-10, + "loss": 0.5475, + "step": 24041 + }, + { + "epoch": 0.992077246843278, + "grad_norm": 2.6052693747472886, + "learning_rate": 4.938353460191558e-10, + "loss": 0.4947, + "step": 24042 + }, + { + "epoch": 0.9921185111826359, + "grad_norm": 4.570688126056537, + "learning_rate": 4.887049025918966e-10, + "loss": 0.5362, + "step": 24043 + }, + { + "epoch": 0.9921597755219939, + "grad_norm": 3.818581946269352, + "learning_rate": 4.836012441875037e-10, + "loss": 0.4812, + "step": 24044 + }, + { + "epoch": 0.9922010398613518, + "grad_norm": 3.702132425446293, + "learning_rate": 4.78524370897071e-10, + "loss": 0.5434, + "step": 24045 + }, + { + "epoch": 0.9922423042007098, + "grad_norm": 2.285589152146059, + "learning_rate": 4.734742828116922e-10, + "loss": 0.5097, + "step": 24046 + }, + { + "epoch": 0.9922835685400677, + "grad_norm": 3.277086668350401, + "learning_rate": 4.68450980021129e-10, + "loss": 0.5434, + "step": 24047 + }, + { + "epoch": 0.9923248328794256, + "grad_norm": 2.920258070212651, + "learning_rate": 4.634544626153092e-10, + "loss": 0.5071, + "step": 24048 + }, + { + "epoch": 0.9923660972187835, + "grad_norm": 7.4663474802706356, + "learning_rate": 4.5848473068349493e-10, + "loss": 0.5162, + "step": 24049 + }, + { + "epoch": 0.9924073615581415, + "grad_norm": 3.695463867414991, + "learning_rate": 4.5354178431428196e-10, + "loss": 0.5062, + "step": 24050 + }, + { + "epoch": 0.9924486258974994, + "grad_norm": 4.3385435589918275, + "learning_rate": 4.48625623596266e-10, + "loss": 0.505, + "step": 24051 + }, + { + "epoch": 0.9924898902368573, + "grad_norm": 4.325157199560825, + "learning_rate": 4.4373624861704377e-10, + "loss": 0.4749, + "step": 24052 + }, + { + "epoch": 0.9925311545762152, + "grad_norm": 3.801771235288242, + "learning_rate": 4.3887365946404525e-10, + "loss": 0.5197, + "step": 24053 + }, + { + "epoch": 0.9925724189155731, + "grad_norm": 3.583715747262546, + "learning_rate": 4.3403785622403436e-10, + "loss": 0.4715, + "step": 24054 + }, + { + "epoch": 0.992613683254931, + "grad_norm": 2.80386609943546, + "learning_rate": 4.29228838983442e-10, + "loss": 0.4513, + "step": 24055 + }, + { + "epoch": 0.992654947594289, + "grad_norm": 5.587991280237642, + "learning_rate": 4.244466078281994e-10, + "loss": 0.5139, + "step": 24056 + }, + { + "epoch": 0.992696211933647, + "grad_norm": 5.299751013666618, + "learning_rate": 4.196911628437383e-10, + "loss": 0.496, + "step": 24057 + }, + { + "epoch": 0.9927374762730049, + 
"grad_norm": 4.764751105252589, + "learning_rate": 4.149625041149907e-10, + "loss": 0.5108, + "step": 24058 + }, + { + "epoch": 0.9927787406123628, + "grad_norm": 24.183694376466292, + "learning_rate": 4.1026063172622254e-10, + "loss": 0.5038, + "step": 24059 + }, + { + "epoch": 0.9928200049517207, + "grad_norm": 2.17944835309327, + "learning_rate": 4.055855457618662e-10, + "loss": 0.4647, + "step": 24060 + }, + { + "epoch": 0.9928612692910787, + "grad_norm": 2.806116001745008, + "learning_rate": 4.0093724630502206e-10, + "loss": 0.4867, + "step": 24061 + }, + { + "epoch": 0.9929025336304366, + "grad_norm": 3.8147023045986566, + "learning_rate": 3.963157334387901e-10, + "loss": 0.4894, + "step": 24062 + }, + { + "epoch": 0.9929437979697945, + "grad_norm": 5.39139253876655, + "learning_rate": 3.917210072457711e-10, + "loss": 0.4704, + "step": 24063 + }, + { + "epoch": 0.9929850623091524, + "grad_norm": 3.068294536805078, + "learning_rate": 3.8715306780823243e-10, + "loss": 0.4699, + "step": 24064 + }, + { + "epoch": 0.9930263266485103, + "grad_norm": 2.8030987974150965, + "learning_rate": 3.8261191520744256e-10, + "loss": 0.4845, + "step": 24065 + }, + { + "epoch": 0.9930675909878682, + "grad_norm": 3.037293348420541, + "learning_rate": 3.7809754952483624e-10, + "loss": 0.5275, + "step": 24066 + }, + { + "epoch": 0.9931088553272263, + "grad_norm": 2.749053089661633, + "learning_rate": 3.736099708406826e-10, + "loss": 0.5002, + "step": 24067 + }, + { + "epoch": 0.9931501196665842, + "grad_norm": 3.3242682861551756, + "learning_rate": 3.6914917923558387e-10, + "loss": 0.479, + "step": 24068 + }, + { + "epoch": 0.9931913840059421, + "grad_norm": 3.510395290065532, + "learning_rate": 3.6471517478881e-10, + "loss": 0.5424, + "step": 24069 + }, + { + "epoch": 0.9932326483453, + "grad_norm": 5.201121733936017, + "learning_rate": 3.603079575797974e-10, + "loss": 0.4957, + "step": 24070 + }, + { + "epoch": 0.9932739126846579, + "grad_norm": 4.700987735101418, + "learning_rate": 3.5592752768714985e-10, + "loss": 0.5473, + "step": 24071 + }, + { + "epoch": 0.9933151770240158, + "grad_norm": 2.4671521990130887, + "learning_rate": 3.5157388518930467e-10, + "loss": 0.5458, + "step": 24072 + }, + { + "epoch": 0.9933564413633738, + "grad_norm": 2.401823058095781, + "learning_rate": 3.4724703016369984e-10, + "loss": 0.5222, + "step": 24073 + }, + { + "epoch": 0.9933977057027317, + "grad_norm": 3.5237645468820205, + "learning_rate": 3.4294696268794e-10, + "loss": 0.5914, + "step": 24074 + }, + { + "epoch": 0.9934389700420896, + "grad_norm": 17.89570077555441, + "learning_rate": 3.386736828387971e-10, + "loss": 0.5046, + "step": 24075 + }, + { + "epoch": 0.9934802343814475, + "grad_norm": 1.9631375234367878, + "learning_rate": 3.344271906923768e-10, + "loss": 0.5021, + "step": 24076 + }, + { + "epoch": 0.9935214987208055, + "grad_norm": 2.7013537396254783, + "learning_rate": 3.30207486324785e-10, + "loss": 0.4613, + "step": 24077 + }, + { + "epoch": 0.9935627630601634, + "grad_norm": 2.4505506374116126, + "learning_rate": 3.2601456981112833e-10, + "loss": 0.5583, + "step": 24078 + }, + { + "epoch": 0.9936040273995214, + "grad_norm": 4.061586736666066, + "learning_rate": 3.2184844122651323e-10, + "loss": 0.4841, + "step": 24079 + }, + { + "epoch": 0.9936452917388793, + "grad_norm": 3.085117025253156, + "learning_rate": 3.177091006453803e-10, + "loss": 0.5525, + "step": 24080 + }, + { + "epoch": 0.9936865560782372, + "grad_norm": 3.7441134067047503, + "learning_rate": 3.1359654814150375e-10, + "loss": 0.4554, + 
"step": 24081 + }, + { + "epoch": 0.9937278204175951, + "grad_norm": 3.0503683041895906, + "learning_rate": 3.095107837884914e-10, + "loss": 0.4883, + "step": 24082 + }, + { + "epoch": 0.993769084756953, + "grad_norm": 3.049750827631251, + "learning_rate": 3.0545180765928495e-10, + "loss": 0.5527, + "step": 24083 + }, + { + "epoch": 0.9938103490963109, + "grad_norm": 2.026774089176817, + "learning_rate": 3.014196198263264e-10, + "loss": 0.4667, + "step": 24084 + }, + { + "epoch": 0.9938516134356689, + "grad_norm": 5.789883499151279, + "learning_rate": 2.9741422036172473e-10, + "loss": 0.5439, + "step": 24085 + }, + { + "epoch": 0.9938928777750268, + "grad_norm": 3.662395847621133, + "learning_rate": 2.9343560933708935e-10, + "loss": 0.4913, + "step": 24086 + }, + { + "epoch": 0.9939341421143848, + "grad_norm": 5.255544659825113, + "learning_rate": 2.89483786823197e-10, + "loss": 0.5172, + "step": 24087 + }, + { + "epoch": 0.9939754064537427, + "grad_norm": 3.099092021978397, + "learning_rate": 2.8555875289099087e-10, + "loss": 0.5141, + "step": 24088 + }, + { + "epoch": 0.9940166707931006, + "grad_norm": 6.401354446805743, + "learning_rate": 2.816605076102485e-10, + "loss": 0.5144, + "step": 24089 + }, + { + "epoch": 0.9940579351324585, + "grad_norm": 3.2144990434071365, + "learning_rate": 2.7778905105091403e-10, + "loss": 0.5003, + "step": 24090 + }, + { + "epoch": 0.9940991994718165, + "grad_norm": 3.8821527222640686, + "learning_rate": 2.739443832817656e-10, + "loss": 0.4921, + "step": 24091 + }, + { + "epoch": 0.9941404638111744, + "grad_norm": 2.2365056856099694, + "learning_rate": 2.7012650437191475e-10, + "loss": 0.4603, + "step": 24092 + }, + { + "epoch": 0.9941817281505323, + "grad_norm": 3.5163288375374413, + "learning_rate": 2.663354143893071e-10, + "loss": 0.4959, + "step": 24093 + }, + { + "epoch": 0.9942229924898902, + "grad_norm": 2.642177023294394, + "learning_rate": 2.625711134017217e-10, + "loss": 0.5477, + "step": 24094 + }, + { + "epoch": 0.9942642568292481, + "grad_norm": 4.885772752722223, + "learning_rate": 2.5883360147643807e-10, + "loss": 0.5028, + "step": 24095 + }, + { + "epoch": 0.994305521168606, + "grad_norm": 5.259369541991264, + "learning_rate": 2.5512287868006966e-10, + "loss": 0.5363, + "step": 24096 + }, + { + "epoch": 0.9943467855079641, + "grad_norm": 3.8242161071446277, + "learning_rate": 2.514389450790633e-10, + "loss": 0.5158, + "step": 24097 + }, + { + "epoch": 0.994388049847322, + "grad_norm": 2.19594104230676, + "learning_rate": 2.477818007390331e-10, + "loss": 0.5502, + "step": 24098 + }, + { + "epoch": 0.9944293141866799, + "grad_norm": 2.49313975662057, + "learning_rate": 2.4415144572559333e-10, + "loss": 0.511, + "step": 24099 + }, + { + "epoch": 0.9944705785260378, + "grad_norm": 3.046840892443966, + "learning_rate": 2.4054788010335893e-10, + "loss": 0.5265, + "step": 24100 + }, + { + "epoch": 0.9945118428653957, + "grad_norm": 2.0987542470944343, + "learning_rate": 2.3697110393661183e-10, + "loss": 0.4919, + "step": 24101 + }, + { + "epoch": 0.9945531072047537, + "grad_norm": 3.344773131773877, + "learning_rate": 2.334211172896339e-10, + "loss": 0.4929, + "step": 24102 + }, + { + "epoch": 0.9945943715441116, + "grad_norm": 3.662555284767836, + "learning_rate": 2.298979202253748e-10, + "loss": 0.4992, + "step": 24103 + }, + { + "epoch": 0.9946356358834695, + "grad_norm": 3.0102106907040187, + "learning_rate": 2.264015128071173e-10, + "loss": 0.4948, + "step": 24104 + }, + { + "epoch": 0.9946769002228274, + "grad_norm": 2.317255384997021, + 
"learning_rate": 2.2293189509714485e-10, + "loss": 0.5487, + "step": 24105 + }, + { + "epoch": 0.9947181645621853, + "grad_norm": 2.4390662223771025, + "learning_rate": 2.1948906715757444e-10, + "loss": 0.5198, + "step": 24106 + }, + { + "epoch": 0.9947594289015433, + "grad_norm": 1.9565735594174156, + "learning_rate": 2.160730290496904e-10, + "loss": 0.4989, + "step": 24107 + }, + { + "epoch": 0.9948006932409013, + "grad_norm": 4.3156119419804675, + "learning_rate": 2.126837808346105e-10, + "loss": 0.4963, + "step": 24108 + }, + { + "epoch": 0.9948419575802592, + "grad_norm": 4.3405686596867605, + "learning_rate": 2.093213225729529e-10, + "loss": 0.5051, + "step": 24109 + }, + { + "epoch": 0.9948832219196171, + "grad_norm": 3.249436424000092, + "learning_rate": 2.0598565432483617e-10, + "loss": 0.506, + "step": 24110 + }, + { + "epoch": 0.994924486258975, + "grad_norm": 8.604304421603748, + "learning_rate": 2.0267677614954629e-10, + "loss": 0.509, + "step": 24111 + }, + { + "epoch": 0.9949657505983329, + "grad_norm": 2.2529437250483917, + "learning_rate": 1.9939468810636908e-10, + "loss": 0.5238, + "step": 24112 + }, + { + "epoch": 0.9950070149376908, + "grad_norm": 2.7181671710182904, + "learning_rate": 1.961393902540909e-10, + "loss": 0.4874, + "step": 24113 + }, + { + "epoch": 0.9950482792770488, + "grad_norm": 3.403741106449118, + "learning_rate": 1.9291088265049883e-10, + "loss": 0.5159, + "step": 24114 + }, + { + "epoch": 0.9950895436164067, + "grad_norm": 3.1720125928758396, + "learning_rate": 1.8970916535354654e-10, + "loss": 0.5305, + "step": 24115 + }, + { + "epoch": 0.9951308079557646, + "grad_norm": 3.3421342545213553, + "learning_rate": 1.86534238420355e-10, + "loss": 0.5559, + "step": 24116 + }, + { + "epoch": 0.9951720722951225, + "grad_norm": 1.877034024419706, + "learning_rate": 1.833861019077121e-10, + "loss": 0.555, + "step": 24117 + }, + { + "epoch": 0.9952133366344805, + "grad_norm": 3.1530306591250894, + "learning_rate": 1.802647558715731e-10, + "loss": 0.5431, + "step": 24118 + }, + { + "epoch": 0.9952546009738384, + "grad_norm": 3.6263183662832947, + "learning_rate": 1.7717020036805976e-10, + "loss": 0.4845, + "step": 24119 + }, + { + "epoch": 0.9952958653131964, + "grad_norm": 3.36193094674368, + "learning_rate": 1.7410243545212812e-10, + "loss": 0.5079, + "step": 24120 + }, + { + "epoch": 0.9953371296525543, + "grad_norm": 4.168453435541943, + "learning_rate": 1.7106146117873423e-10, + "loss": 0.4649, + "step": 24121 + }, + { + "epoch": 0.9953783939919122, + "grad_norm": 4.086058413296332, + "learning_rate": 1.6804727760216797e-10, + "loss": 0.4695, + "step": 24122 + }, + { + "epoch": 0.9954196583312701, + "grad_norm": 2.430441983490805, + "learning_rate": 1.6505988477638622e-10, + "loss": 0.4708, + "step": 24123 + }, + { + "epoch": 0.995460922670628, + "grad_norm": 3.263305617114896, + "learning_rate": 1.6209928275451313e-10, + "loss": 0.4781, + "step": 24124 + }, + { + "epoch": 0.9955021870099859, + "grad_norm": 9.212974926149865, + "learning_rate": 1.5916547158967287e-10, + "loss": 0.5081, + "step": 24125 + }, + { + "epoch": 0.9955434513493439, + "grad_norm": 4.166081017850758, + "learning_rate": 1.5625845133415696e-10, + "loss": 0.538, + "step": 24126 + }, + { + "epoch": 0.9955847156887018, + "grad_norm": 4.927237822049617, + "learning_rate": 1.533782220399238e-10, + "loss": 0.4655, + "step": 24127 + }, + { + "epoch": 0.9956259800280598, + "grad_norm": 6.453657548891971, + "learning_rate": 1.5052478375826573e-10, + "loss": 0.4874, + "step": 24128 + }, + { + 
"epoch": 0.9956672443674177, + "grad_norm": 3.401655989685321, + "learning_rate": 1.476981365403085e-10, + "loss": 0.5503, + "step": 24129 + }, + { + "epoch": 0.9957085087067756, + "grad_norm": 6.838242837244326, + "learning_rate": 1.448982804366783e-10, + "loss": 0.5416, + "step": 24130 + }, + { + "epoch": 0.9957497730461335, + "grad_norm": 2.7110851306870827, + "learning_rate": 1.4212521549716862e-10, + "loss": 0.5337, + "step": 24131 + }, + { + "epoch": 0.9957910373854915, + "grad_norm": 5.508869127833239, + "learning_rate": 1.3937894177123988e-10, + "loss": 0.5108, + "step": 24132 + }, + { + "epoch": 0.9958323017248494, + "grad_norm": 2.6535064704059144, + "learning_rate": 1.3665945930818602e-10, + "loss": 0.5264, + "step": 24133 + }, + { + "epoch": 0.9958735660642073, + "grad_norm": 1.9964859464428173, + "learning_rate": 1.3396676815646824e-10, + "loss": 0.5015, + "step": 24134 + }, + { + "epoch": 0.9959148304035652, + "grad_norm": 2.2653076898016513, + "learning_rate": 1.3130086836421474e-10, + "loss": 0.4454, + "step": 24135 + }, + { + "epoch": 0.9959560947429231, + "grad_norm": 4.03798066385001, + "learning_rate": 1.2866175997888751e-10, + "loss": 0.534, + "step": 24136 + }, + { + "epoch": 0.995997359082281, + "grad_norm": 5.195153137795178, + "learning_rate": 1.2604944304794862e-10, + "loss": 0.4813, + "step": 24137 + }, + { + "epoch": 0.9960386234216391, + "grad_norm": 2.710821381323227, + "learning_rate": 1.2346391761786092e-10, + "loss": 0.4945, + "step": 24138 + }, + { + "epoch": 0.996079887760997, + "grad_norm": 5.961418225933989, + "learning_rate": 1.2090518373475413e-10, + "loss": 0.5382, + "step": 24139 + }, + { + "epoch": 0.9961211521003549, + "grad_norm": 2.6144784843431577, + "learning_rate": 1.1837324144425842e-10, + "loss": 0.4507, + "step": 24140 + }, + { + "epoch": 0.9961624164397128, + "grad_norm": 3.084453632424903, + "learning_rate": 1.15868090792004e-10, + "loss": 0.4587, + "step": 24141 + }, + { + "epoch": 0.9962036807790707, + "grad_norm": 4.081550632049249, + "learning_rate": 1.1338973182228873e-10, + "loss": 0.4935, + "step": 24142 + }, + { + "epoch": 0.9962449451184286, + "grad_norm": 3.4913049738406965, + "learning_rate": 1.109381645797436e-10, + "loss": 0.4853, + "step": 24143 + }, + { + "epoch": 0.9962862094577866, + "grad_norm": 5.011002342541699, + "learning_rate": 1.0851338910783382e-10, + "loss": 0.5108, + "step": 24144 + }, + { + "epoch": 0.9963274737971445, + "grad_norm": 7.1061572254413665, + "learning_rate": 1.0611540545019116e-10, + "loss": 0.4558, + "step": 24145 + }, + { + "epoch": 0.9963687381365024, + "grad_norm": 3.3608311902949324, + "learning_rate": 1.0374421364928166e-10, + "loss": 0.4976, + "step": 24146 + }, + { + "epoch": 0.9964100024758603, + "grad_norm": 4.9259443725530465, + "learning_rate": 1.0139981374790441e-10, + "loss": 0.5158, + "step": 24147 + }, + { + "epoch": 0.9964512668152183, + "grad_norm": 7.0889944931344, + "learning_rate": 9.908220578752624e-11, + "loss": 0.4688, + "step": 24148 + }, + { + "epoch": 0.9964925311545763, + "grad_norm": 6.3292728706764585, + "learning_rate": 9.679138980978052e-11, + "loss": 0.4947, + "step": 24149 + }, + { + "epoch": 0.9965337954939342, + "grad_norm": 3.027572349412785, + "learning_rate": 9.452736585546795e-11, + "loss": 0.521, + "step": 24150 + }, + { + "epoch": 0.9965750598332921, + "grad_norm": 2.051848965976768, + "learning_rate": 9.229013396522268e-11, + "loss": 0.4766, + "step": 24151 + }, + { + "epoch": 0.99661632417265, + "grad_norm": 3.21299717514257, + "learning_rate": 
9.007969417867968e-11, + "loss": 0.5416, + "step": 24152 + }, + { + "epoch": 0.9966575885120079, + "grad_norm": 3.542427163919044, + "learning_rate": 8.789604653564042e-11, + "loss": 0.5136, + "step": 24153 + }, + { + "epoch": 0.9966988528513658, + "grad_norm": 6.154738925937224, + "learning_rate": 8.573919107490724e-11, + "loss": 0.5301, + "step": 24154 + }, + { + "epoch": 0.9967401171907238, + "grad_norm": 2.068519188143879, + "learning_rate": 8.360912783511588e-11, + "loss": 0.4788, + "step": 24155 + }, + { + "epoch": 0.9967813815300817, + "grad_norm": 2.8035395206155886, + "learning_rate": 8.150585685440248e-11, + "loss": 0.5063, + "step": 24156 + }, + { + "epoch": 0.9968226458694396, + "grad_norm": 4.378154254431579, + "learning_rate": 7.942937817007056e-11, + "loss": 0.4331, + "step": 24157 + }, + { + "epoch": 0.9968639102087976, + "grad_norm": 3.205897591782328, + "learning_rate": 7.737969181942361e-11, + "loss": 0.4642, + "step": 24158 + }, + { + "epoch": 0.9969051745481555, + "grad_norm": 4.267049456376154, + "learning_rate": 7.535679783893246e-11, + "loss": 0.5101, + "step": 24159 + }, + { + "epoch": 0.9969464388875134, + "grad_norm": 3.238405201589983, + "learning_rate": 7.336069626490138e-11, + "loss": 0.4673, + "step": 24160 + }, + { + "epoch": 0.9969877032268714, + "grad_norm": 3.258238572392362, + "learning_rate": 7.139138713280202e-11, + "loss": 0.4765, + "step": 24161 + }, + { + "epoch": 0.9970289675662293, + "grad_norm": 12.108036663294698, + "learning_rate": 6.944887047793946e-11, + "loss": 0.4987, + "step": 24162 + }, + { + "epoch": 0.9970702319055872, + "grad_norm": 2.485417079206152, + "learning_rate": 6.753314633495267e-11, + "loss": 0.5557, + "step": 24163 + }, + { + "epoch": 0.9971114962449451, + "grad_norm": 2.980499132696474, + "learning_rate": 6.564421473814753e-11, + "loss": 0.4787, + "step": 24164 + }, + { + "epoch": 0.997152760584303, + "grad_norm": 5.483022399862541, + "learning_rate": 6.378207572099726e-11, + "loss": 0.5175, + "step": 24165 + }, + { + "epoch": 0.9971940249236609, + "grad_norm": 3.1816358100652176, + "learning_rate": 6.194672931714162e-11, + "loss": 0.4463, + "step": 24166 + }, + { + "epoch": 0.9972352892630189, + "grad_norm": 3.651615064518835, + "learning_rate": 6.013817555922119e-11, + "loss": 0.5846, + "step": 24167 + }, + { + "epoch": 0.9972765536023769, + "grad_norm": 4.0232977160239445, + "learning_rate": 5.83564144793769e-11, + "loss": 0.4875, + "step": 24168 + }, + { + "epoch": 0.9973178179417348, + "grad_norm": 3.774302084116356, + "learning_rate": 5.660144610974971e-11, + "loss": 0.4776, + "step": 24169 + }, + { + "epoch": 0.9973590822810927, + "grad_norm": 3.997182580685901, + "learning_rate": 5.4873270481314854e-11, + "loss": 0.5255, + "step": 24170 + }, + { + "epoch": 0.9974003466204506, + "grad_norm": 2.8706511119904077, + "learning_rate": 5.317188762521408e-11, + "loss": 0.5023, + "step": 24171 + }, + { + "epoch": 0.9974416109598085, + "grad_norm": 2.040897535037812, + "learning_rate": 5.1497297571756474e-11, + "loss": 0.5145, + "step": 24172 + }, + { + "epoch": 0.9974828752991665, + "grad_norm": 2.7574888180068617, + "learning_rate": 4.9849500350918066e-11, + "loss": 0.495, + "step": 24173 + }, + { + "epoch": 0.9975241396385244, + "grad_norm": 2.9037062526078086, + "learning_rate": 4.8228495991842205e-11, + "loss": 0.4396, + "step": 24174 + }, + { + "epoch": 0.9975654039778823, + "grad_norm": 2.8225777220890107, + "learning_rate": 4.663428452400531e-11, + "loss": 0.479, + "step": 24175 + }, + { + "epoch": 0.9976066683172402, 
+ "grad_norm": 3.5033462243575477, + "learning_rate": 4.5066865975385004e-11, + "loss": 0.487, + "step": 24176 + }, + { + "epoch": 0.9976479326565981, + "grad_norm": 3.4742876159773863, + "learning_rate": 4.352624037429198e-11, + "loss": 0.5082, + "step": 24177 + }, + { + "epoch": 0.997689196995956, + "grad_norm": 1.8472040234886142, + "learning_rate": 4.2012407748037715e-11, + "loss": 0.4898, + "step": 24178 + }, + { + "epoch": 0.9977304613353141, + "grad_norm": 5.318404275106382, + "learning_rate": 4.0525368123767173e-11, + "loss": 0.4838, + "step": 24179 + }, + { + "epoch": 0.997771725674672, + "grad_norm": 2.6192425272772315, + "learning_rate": 3.906512152812569e-11, + "loss": 0.4904, + "step": 24180 + }, + { + "epoch": 0.9978129900140299, + "grad_norm": 3.0969121515593185, + "learning_rate": 3.76316679870925e-11, + "loss": 0.4788, + "step": 24181 + }, + { + "epoch": 0.9978542543533878, + "grad_norm": 2.866072727872116, + "learning_rate": 3.6225007526147215e-11, + "loss": 0.5353, + "step": 24182 + }, + { + "epoch": 0.9978955186927457, + "grad_norm": 13.93797210123134, + "learning_rate": 3.4845140170769454e-11, + "loss": 0.4948, + "step": 24183 + }, + { + "epoch": 0.9979367830321036, + "grad_norm": 2.6814477944836383, + "learning_rate": 3.34920659452731e-11, + "loss": 0.5408, + "step": 24184 + }, + { + "epoch": 0.9979780473714616, + "grad_norm": 3.181371220280423, + "learning_rate": 3.2165784873972035e-11, + "loss": 0.5365, + "step": 24185 + }, + { + "epoch": 0.9980193117108195, + "grad_norm": 3.2999314662419925, + "learning_rate": 3.0866296980514016e-11, + "loss": 0.5369, + "step": 24186 + }, + { + "epoch": 0.9980605760501774, + "grad_norm": 4.26150610493937, + "learning_rate": 2.959360228821373e-11, + "loss": 0.4917, + "step": 24187 + }, + { + "epoch": 0.9981018403895353, + "grad_norm": 1.8852998123710267, + "learning_rate": 2.8347700819719713e-11, + "loss": 0.4764, + "step": 24188 + }, + { + "epoch": 0.9981431047288933, + "grad_norm": 9.220352438148124, + "learning_rate": 2.7128592597180923e-11, + "loss": 0.5216, + "step": 24189 + }, + { + "epoch": 0.9981843690682513, + "grad_norm": 3.456645934932248, + "learning_rate": 2.5936277642579774e-11, + "loss": 0.5134, + "step": 24190 + }, + { + "epoch": 0.9982256334076092, + "grad_norm": 2.289796960806993, + "learning_rate": 2.4770755977066016e-11, + "loss": 0.5054, + "step": 24191 + }, + { + "epoch": 0.9982668977469671, + "grad_norm": 2.2435107290753304, + "learning_rate": 2.3632027621456332e-11, + "loss": 0.4771, + "step": 24192 + }, + { + "epoch": 0.998308162086325, + "grad_norm": 2.5258926689843886, + "learning_rate": 2.2520092596234333e-11, + "loss": 0.5475, + "step": 24193 + }, + { + "epoch": 0.9983494264256829, + "grad_norm": 3.398146903335672, + "learning_rate": 2.1434950921050967e-11, + "loss": 0.559, + "step": 24194 + }, + { + "epoch": 0.9983906907650408, + "grad_norm": 2.570392911327759, + "learning_rate": 2.0376602615557182e-11, + "loss": 0.4969, + "step": 24195 + }, + { + "epoch": 0.9984319551043987, + "grad_norm": 1.9505006253417692, + "learning_rate": 1.9345047698404728e-11, + "loss": 0.5203, + "step": 24196 + }, + { + "epoch": 0.9984732194437567, + "grad_norm": 3.1208963285482554, + "learning_rate": 1.834028618824535e-11, + "loss": 0.5229, + "step": 24197 + }, + { + "epoch": 0.9985144837831146, + "grad_norm": 3.0762321760797624, + "learning_rate": 1.7362318102898123e-11, + "loss": 0.4782, + "step": 24198 + }, + { + "epoch": 0.9985557481224726, + "grad_norm": 2.70793519762175, + "learning_rate": 1.641114345968253e-11, + 
"loss": 0.5355, + "step": 24199 + }, + { + "epoch": 0.9985970124618305, + "grad_norm": 2.2644411394959585, + "learning_rate": 1.5486762275918053e-11, + "loss": 0.4499, + "step": 24200 + }, + { + "epoch": 0.9986382768011884, + "grad_norm": 5.397025119562138, + "learning_rate": 1.4589174567924967e-11, + "loss": 0.487, + "step": 24201 + }, + { + "epoch": 0.9986795411405464, + "grad_norm": 4.2344104189875535, + "learning_rate": 1.3718380351690485e-11, + "loss": 0.4577, + "step": 24202 + }, + { + "epoch": 0.9987208054799043, + "grad_norm": 2.528243663841407, + "learning_rate": 1.287437964286875e-11, + "loss": 0.5357, + "step": 24203 + }, + { + "epoch": 0.9987620698192622, + "grad_norm": 2.4476948992597123, + "learning_rate": 1.2057172456447774e-11, + "loss": 0.5425, + "step": 24204 + }, + { + "epoch": 0.9988033341586201, + "grad_norm": 3.014629613201694, + "learning_rate": 1.12667588070825e-11, + "loss": 0.5295, + "step": 24205 + }, + { + "epoch": 0.998844598497978, + "grad_norm": 12.060628427047416, + "learning_rate": 1.0503138708928273e-11, + "loss": 0.4878, + "step": 24206 + }, + { + "epoch": 0.9988858628373359, + "grad_norm": 30.608944092496998, + "learning_rate": 9.766312175640834e-12, + "loss": 0.5486, + "step": 24207 + }, + { + "epoch": 0.9989271271766939, + "grad_norm": 4.298095943134594, + "learning_rate": 9.056279220209796e-12, + "loss": 0.5101, + "step": 24208 + }, + { + "epoch": 0.9989683915160519, + "grad_norm": 3.203227879273061, + "learning_rate": 8.373039855624765e-12, + "loss": 0.5613, + "step": 24209 + }, + { + "epoch": 0.9990096558554098, + "grad_norm": 2.809651417833516, + "learning_rate": 7.716594093709616e-12, + "loss": 0.5039, + "step": 24210 + }, + { + "epoch": 0.9990509201947677, + "grad_norm": 6.950973820570636, + "learning_rate": 7.086941946454762e-12, + "loss": 0.5164, + "step": 24211 + }, + { + "epoch": 0.9990921845341256, + "grad_norm": 5.6584772271383255, + "learning_rate": 6.4840834250179394e-12, + "loss": 0.5444, + "step": 24212 + }, + { + "epoch": 0.9991334488734835, + "grad_norm": 4.294827352352202, + "learning_rate": 5.908018540223825e-12, + "loss": 0.5258, + "step": 24213 + }, + { + "epoch": 0.9991747132128415, + "grad_norm": 3.690418339607008, + "learning_rate": 5.358747302397493e-12, + "loss": 0.4879, + "step": 24214 + }, + { + "epoch": 0.9992159775521994, + "grad_norm": 2.755092556909517, + "learning_rate": 4.8362697211978835e-12, + "loss": 0.5813, + "step": 24215 + }, + { + "epoch": 0.9992572418915573, + "grad_norm": 3.994045719561216, + "learning_rate": 4.34058580595087e-12, + "loss": 0.5521, + "step": 24216 + }, + { + "epoch": 0.9992985062309152, + "grad_norm": 4.400933759552067, + "learning_rate": 3.871695565649258e-12, + "loss": 0.5166, + "step": 24217 + }, + { + "epoch": 0.9993397705702731, + "grad_norm": 9.17720223359662, + "learning_rate": 3.4295990086197217e-12, + "loss": 0.5254, + "step": 24218 + }, + { + "epoch": 0.9993810349096311, + "grad_norm": 3.0784931576562156, + "learning_rate": 3.014296142689332e-12, + "loss": 0.4873, + "step": 24219 + }, + { + "epoch": 0.9994222992489891, + "grad_norm": 4.870360359100357, + "learning_rate": 2.625786975518629e-12, + "loss": 0.5173, + "step": 24220 + }, + { + "epoch": 0.999463563588347, + "grad_norm": 2.0097532811513643, + "learning_rate": 2.2640715137689505e-12, + "loss": 0.4668, + "step": 24221 + }, + { + "epoch": 0.9995048279277049, + "grad_norm": 9.737109108336519, + "learning_rate": 1.9291497639351006e-12, + "loss": 0.5286, + "step": 24222 + }, + { + "epoch": 0.9995460922670628, + "grad_norm": 
4.6650671824907795, + "learning_rate": 1.6210217321788179e-12, + "loss": 0.4644, + "step": 24223 + }, + { + "epoch": 0.9995873566064207, + "grad_norm": 2.324621655557997, + "learning_rate": 1.3396874238291723e-12, + "loss": 0.54, + "step": 24224 + }, + { + "epoch": 0.9996286209457786, + "grad_norm": 5.838119193105999, + "learning_rate": 1.0851468440487011e-12, + "loss": 0.4904, + "step": 24225 + }, + { + "epoch": 0.9996698852851366, + "grad_norm": 3.2764550148899443, + "learning_rate": 8.573999971672741e-13, + "loss": 0.5017, + "step": 24226 + }, + { + "epoch": 0.9997111496244945, + "grad_norm": 3.3187178049147885, + "learning_rate": 6.564468875147611e-13, + "loss": 0.5151, + "step": 24227 + }, + { + "epoch": 0.9997524139638524, + "grad_norm": 6.989542012645106, + "learning_rate": 4.822875185883646e-13, + "loss": 0.4949, + "step": 24228 + }, + { + "epoch": 0.9997936783032104, + "grad_norm": 4.595648716363855, + "learning_rate": 3.349218933856868e-13, + "loss": 0.5013, + "step": 24229 + }, + { + "epoch": 0.9998349426425683, + "grad_norm": 2.6281328000564437, + "learning_rate": 2.143500145712629e-13, + "loss": 0.4909, + "step": 24230 + }, + { + "epoch": 0.9998762069819263, + "grad_norm": 3.3897349170056907, + "learning_rate": 1.2057188447656132e-13, + "loss": 0.5252, + "step": 24231 + }, + { + "epoch": 0.9999174713212842, + "grad_norm": 3.6012509955588383, + "learning_rate": 5.3587504600383087e-14, + "loss": 0.5062, + "step": 24232 + }, + { + "epoch": 0.9999587356606421, + "grad_norm": 25.236788776219637, + "learning_rate": 1.3396876274995862e-14, + "loss": 0.4926, + "step": 24233 + }, + { + "epoch": 1.0, + "grad_norm": 3.3151724867599808, + "learning_rate": 0.0, + "loss": 0.5041, + "step": 24234 + }, + { + "epoch": 1.0, + "step": 24234, + "total_flos": 7309385426403328.0, + "train_loss": 0.5402602604895109, + "train_runtime": 59745.2521, + "train_samples_per_second": 51.918, + "train_steps_per_second": 0.406 + } + ], + "logging_steps": 1.0, + "max_steps": 24234, + "num_input_tokens_seen": 0, + "num_train_epochs": 1, + "save_steps": 5000, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": true + }, + "attributes": {} + } + }, + "total_flos": 7309385426403328.0, + "train_batch_size": 4, + "trial_name": null, + "trial_params": null +}