{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9979409746053536,
  "eval_steps": 100,
  "global_step": 1092,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.01, "learning_rate": 0.0, "loss": 0.7252, "step": 2 },
    { "epoch": 0.01, "learning_rate": 0.0, "loss": 0.7311, "step": 4 },
    { "epoch": 0.02, "learning_rate": 0.0, "loss": 0.7542, "step": 6 },
    { "epoch": 0.02, "learning_rate": 0.0, "loss": 0.7189, "step": 8 },
    { "epoch": 0.03, "learning_rate": 0.0, "loss": 0.7314, "step": 10 },
    { "epoch": 0.03, "learning_rate": 0.0, "loss": 0.7629, "step": 12 },
    { "epoch": 0.04, "learning_rate": 0.0, "loss": 0.7358, "step": 14 },
    { "epoch": 0.04, "learning_rate": 1.5384615384615387e-06, "loss": 0.7348, "step": 16 },
    { "epoch": 0.05, "learning_rate": 2.307692307692308e-06, "loss": 0.7438, "step": 18 },
    { "epoch": 0.05, "learning_rate": 3.846153846153847e-06, "loss": 0.7021, "step": 20 },
    { "epoch": 0.06, "learning_rate": 5.3846153846153855e-06, "loss": 0.684, "step": 22 },
    { "epoch": 0.07, "learning_rate": 6.923076923076923e-06, "loss": 0.6652, "step": 24 },
    { "epoch": 0.07, "learning_rate": 8.461538461538462e-06, "loss": 0.6302, "step": 26 },
    { "epoch": 0.08, "learning_rate": 1e-05, "loss": 0.6212, "step": 28 },
    { "epoch": 0.08, "learning_rate": 1.153846153846154e-05, "loss": 0.5972, "step": 30 },
    { "epoch": 0.09, "learning_rate": 1.3076923076923078e-05, "loss": 0.6068, "step": 32 },
    { "epoch": 0.09, "learning_rate": 1.4615384615384617e-05, "loss": 0.5911, "step": 34 },
    { "epoch": 0.1, "learning_rate": 1.6153846153846154e-05, "loss": 0.6242, "step": 36 },
    { "epoch": 0.1, "learning_rate": 1.7692307692307694e-05, "loss": 0.5971, "step": 38 },
    { "epoch": 0.11, "learning_rate": 1.923076923076923e-05, "loss": 0.6028, "step": 40 },
    { "epoch": 0.12, "learning_rate": 2.0769230769230772e-05, "loss": 0.5916, "step": 42 },
    { "epoch": 0.12, "learning_rate": 2.230769230769231e-05, "loss": 0.5942, "step": 44 },
    { "epoch": 0.13, "learning_rate": 2.384615384615385e-05, "loss": 0.5751, "step": 46 },
    { "epoch": 0.13, "learning_rate": 2.5384615384615383e-05, "loss": 0.5544, "step": 48 },
    { "epoch": 0.14, "learning_rate": 2.6923076923076923e-05, "loss": 0.581, "step": 50 },
    { "epoch": 0.14, "learning_rate": 2.846153846153846e-05, "loss": 0.5676, "step": 52 },
    { "epoch": 0.15, "learning_rate": 3e-05, "loss": 0.5969, "step": 54 },
    { "epoch": 0.15, "learning_rate": 3.153846153846154e-05, "loss": 0.5501, "step": 56 },
    { "epoch": 0.16, "learning_rate": 3.307692307692308e-05, "loss": 0.6018, "step": 58 },
    { "epoch": 0.16, "learning_rate": 3.461538461538462e-05, "loss": 0.5452, "step": 60 },
    { "epoch": 0.17, "learning_rate": 3.615384615384615e-05, "loss": 0.5271, "step": 62 },
    { "epoch": 0.18, "learning_rate": 3.769230769230769e-05, "loss": 0.5893, "step": 64 },
    { "epoch": 0.18, "learning_rate": 3.923076923076923e-05, "loss": 0.59, "step": 66 },
    { "epoch": 0.19, "learning_rate": 4.0769230769230773e-05, "loss": 0.5464, "step": 68 },
    { "epoch": 0.19, "learning_rate": 4.230769230769231e-05, "loss": 0.5732, "step": 70 },
    { "epoch": 0.2, "learning_rate": 4.384615384615385e-05, "loss": 0.556, "step": 72 },
    { "epoch": 0.2, "learning_rate": 4.538461538461539e-05, "loss": 0.5391, "step": 74 },
    { "epoch": 0.21, "learning_rate": 4.692307692307693e-05, "loss": 0.5504, "step": 76 },
    { "epoch": 0.21, "learning_rate": 4.846153846153846e-05, "loss": 0.5403, "step": 78 },
    { "epoch": 0.22, "learning_rate": 5e-05, "loss": 0.5598, "step": 80 },
    { "epoch": 0.23, "learning_rate": 4.999953212751255e-05, "loss": 0.5541, "step": 82 },
    { "epoch": 0.23, "learning_rate": 4.999812852756259e-05, "loss": 0.5621, "step": 84 },
    { "epoch": 0.24, "learning_rate": 4.999578925268656e-05, "loss": 0.5448, "step": 86 },
    { "epoch": 0.24, "learning_rate": 4.999251439044307e-05, "loss": 0.5271, "step": 88 },
    { "epoch": 0.25, "learning_rate": 4.998830406340954e-05, "loss": 0.5689, "step": 90 },
    { "epoch": 0.25, "learning_rate": 4.998315842917767e-05, "loss": 0.5448, "step": 92 },
    { "epoch": 0.26, "learning_rate": 4.997707768034752e-05, "loss": 0.5657, "step": 94 },
    { "epoch": 0.26, "learning_rate": 4.997006204452029e-05, "loss": 0.5593, "step": 96 },
    { "epoch": 0.27, "learning_rate": 4.996211178428982e-05, "loss": 0.586, "step": 98 },
    { "epoch": 0.27, "learning_rate": 4.9953227197232755e-05, "loss": 0.5443, "step": 100 },
    { "epoch": 0.27, "eval_loss": 0.5578761696815491, "eval_runtime": 405.967, "eval_samples_per_second": 24.174, "eval_steps_per_second": 0.19, "step": 100 },
    { "epoch": 0.28, "learning_rate": 4.9943408615897404e-05, "loss": 0.5368, "step": 102 },
    { "epoch": 0.29, "learning_rate": 4.993265640779129e-05, "loss": 0.5516, "step": 104 },
    { "epoch": 0.29, "learning_rate": 4.99209709753674e-05, "loss": 0.5334, "step": 106 },
    { "epoch": 0.3, "learning_rate": 4.990835275600913e-05, "loss": 0.5467, "step": 108 },
    { "epoch": 0.3, "learning_rate": 4.989480222201387e-05, "loss": 0.5644, "step": 110 },
    { "epoch": 0.31, "learning_rate": 4.988031988057541e-05, "loss": 0.5657, "step": 112 },
    { "epoch": 0.31, "learning_rate": 4.9864906273764864e-05, "loss": 0.5275, "step": 114 },
    { "epoch": 0.32, "learning_rate": 4.984856197851045e-05, "loss": 0.5513, "step": 116 },
    { "epoch": 0.32, "learning_rate": 4.983128760657584e-05, "loss": 0.5472, "step": 118 },
    { "epoch": 0.33, "learning_rate": 4.981308380453732e-05, "loss": 0.5543, "step": 120 },
    { "epoch": 0.33, "learning_rate": 4.979395125375952e-05, "loss": 0.5625, "step": 122 },
    { "epoch": 0.34, "learning_rate": 4.977389067036998e-05, "loss": 0.5687, "step": 124 },
    { "epoch": 0.35, "learning_rate": 4.9752902805232306e-05, "loss": 0.5779, "step": 126 },
    { "epoch": 0.35, "learning_rate": 4.973098844391807e-05, "loss": 0.5339, "step": 128 },
    { "epoch": 0.36, "learning_rate": 4.9708148406677415e-05, "loss": 0.5633, "step": 130 },
    { "epoch": 0.36, "learning_rate": 4.968438354840834e-05, "loss": 0.5479, "step": 132 },
    { "epoch": 0.37, "learning_rate": 4.965969475862471e-05, "loss": 0.5428, "step": 134 },
    { "epoch": 0.37, "learning_rate": 4.963408296142297e-05, "loss": 0.5403, "step": 136 },
    { "epoch": 0.38, "learning_rate": 4.960754911544753e-05, "loss": 0.5488, "step": 138 },
    { "epoch": 0.38, "learning_rate": 4.9580094213854935e-05, "loss": 0.5434, "step": 140 },
    { "epoch": 0.39, "learning_rate": 4.9551719284276604e-05, "loss": 0.5652, "step": 142 },
    { "epoch": 0.4, "learning_rate": 4.9522425388780466e-05, "loss": 0.5371, "step": 144 },
    { "epoch": 0.4, "learning_rate": 4.949221362383114e-05, "loss": 0.5247, "step": 146 },
    { "epoch": 0.41, "learning_rate": 4.946108512024891e-05, "loss": 0.5389, "step": 148 },
    { "epoch": 0.41, "learning_rate": 4.942904104316741e-05, "loss": 0.5153, "step": 150 },
    { "epoch": 0.42, "learning_rate": 4.939608259199001e-05, "loss": 0.5274, "step": 152 },
    { "epoch": 0.42, "learning_rate": 4.93622110003449e-05, "loss": 0.5311, "step": 154 },
    { "epoch": 0.43, "learning_rate": 4.932742753603896e-05, "loss": 0.5356, "step": 156 },
    { "epoch": 0.43, "learning_rate": 4.929173350101025e-05, "loss": 0.5508, "step": 158 },
    { "epoch": 0.44, "learning_rate": 4.925513023127934e-05, "loss": 0.5431, "step": 160 },
    { "epoch": 0.44, "learning_rate": 4.921761909689927e-05, "loss": 0.5685, "step": 162 },
    { "epoch": 0.45, "learning_rate": 4.917920150190424e-05, "loss": 0.5334, "step": 164 },
    { "epoch": 0.46, "learning_rate": 4.913987888425712e-05, "loss": 0.5454, "step": 166 },
    { "epoch": 0.46, "learning_rate": 4.909965271579557e-05, "loss": 0.5622, "step": 168 },
    { "epoch": 0.47, "learning_rate": 4.9058524502177005e-05, "loss": 0.5206, "step": 170 },
    { "epoch": 0.47, "learning_rate": 4.9016495782822185e-05, "loss": 0.5381, "step": 172 },
    { "epoch": 0.48, "learning_rate": 4.897356813085763e-05, "loss": 0.5245, "step": 174 },
    { "epoch": 0.48, "learning_rate": 4.892974315305674e-05, "loss": 0.5363, "step": 176 },
    { "epoch": 0.49, "learning_rate": 4.8885022489779594e-05, "loss": 0.5575, "step": 178 },
    { "epoch": 0.49, "learning_rate": 4.8839407814911646e-05, "loss": 0.5195, "step": 180 },
    { "epoch": 0.5, "learning_rate": 4.879290083580101e-05, "loss": 0.5377, "step": 182 },
    { "epoch": 0.51, "learning_rate": 4.874550329319457e-05, "loss": 0.5259, "step": 184 },
    { "epoch": 0.51, "learning_rate": 4.869721696117281e-05, "loss": 0.5287, "step": 186 },
    { "epoch": 0.52, "learning_rate": 4.864804364708343e-05, "loss": 0.5392, "step": 188 },
    { "epoch": 0.52, "learning_rate": 4.859798519147369e-05, "loss": 0.539, "step": 190 },
    { "epoch": 0.53, "learning_rate": 4.8547043468021535e-05, "loss": 0.5475, "step": 192 },
    { "epoch": 0.53, "learning_rate": 4.849522038346543e-05, "loss": 0.5285, "step": 194 },
    { "epoch": 0.54, "learning_rate": 4.8442517877533014e-05, "loss": 0.5643, "step": 196 },
    { "epoch": 0.54, "learning_rate": 4.838893792286847e-05, "loss": 0.54, "step": 198 },
    { "epoch": 0.55, "learning_rate": 4.8334482524958766e-05, "loss": 0.5173, "step": 200 },
    { "epoch": 0.55, "eval_loss": 0.5429729223251343, "eval_runtime": 372.2276, "eval_samples_per_second": 26.366, "eval_steps_per_second": 0.207, "step": 200 },
    { "epoch": 0.55, "learning_rate": 4.827915372205847e-05, "loss": 0.5277, "step": 202 },
    { "epoch": 0.56, "learning_rate": 4.8222953585113576e-05, "loss": 0.5166, "step": 204 },
    { "epoch": 0.57, "learning_rate": 4.8165884217683885e-05, "loss": 0.5486, "step": 206 },
    { "epoch": 0.57, "learning_rate": 4.810794775586438e-05, "loss": 0.5249, "step": 208 },
    { "epoch": 0.58, "learning_rate": 4.804914636820517e-05, "loss": 0.5439, "step": 210 },
    { "epoch": 0.58, "learning_rate": 4.798948225563037e-05, "loss": 0.5612, "step": 212 },
    { "epoch": 0.59, "learning_rate": 4.792895765135572e-05, "loss": 0.5426, "step": 214 },
    { "epoch": 0.59, "learning_rate": 4.7867574820805003e-05, "loss": 0.5313, "step": 216 },
    { "epoch": 0.6, "learning_rate": 4.780533606152522e-05, "loss": 0.5669, "step": 218 },
    { "epoch": 0.6, "learning_rate": 4.7742243703100626e-05, "loss": 0.5544, "step": 220 },
    { "epoch": 0.61, "learning_rate": 4.767830010706551e-05, "loss": 0.5239, "step": 222 },
    { "epoch": 0.61, "learning_rate": 4.761350766681582e-05, "loss": 0.5711, "step": 224 },
    { "epoch": 0.62, "learning_rate": 4.754786880751957e-05, "loss": 0.5363, "step": 226 },
    { "epoch": 0.63, "learning_rate": 4.7481385986026075e-05, "loss": 0.548, "step": 228 },
    { "epoch": 0.63, "learning_rate": 4.7414061690773967e-05, "loss": 0.5308, "step": 230 },
    { "epoch": 0.64, "learning_rate": 4.73458984416981e-05, "loss": 0.5467, "step": 232 },
    { "epoch": 0.64, "learning_rate": 4.7276898790135185e-05, "loss": 0.5149, "step": 234 },
    { "epoch": 0.65, "learning_rate": 4.72070653187283e-05, "loss": 0.5403, "step": 236 },
    { "epoch": 0.65, "learning_rate": 4.713640064133025e-05, "loss": 0.5584, "step": 238 },
    { "epoch": 0.66, "learning_rate": 4.7064907402905705e-05, "loss": 0.5513, "step": 240 },
    { "epoch": 0.66, "learning_rate": 4.699258827943221e-05, "loss": 0.5197, "step": 242 },
    { "epoch": 0.67, "learning_rate": 4.6919445977800014e-05, "loss": 0.5389, "step": 244 },
    { "epoch": 0.68, "learning_rate": 4.6845483235710774e-05, "loss": 0.5373, "step": 246 },
    { "epoch": 0.68, "learning_rate": 4.6770702821575055e-05, "loss": 0.5302, "step": 248 },
    { "epoch": 0.69, "learning_rate": 4.669510753440873e-05, "loss": 0.5004, "step": 250 },
    { "epoch": 0.69, "learning_rate": 4.6618700203728196e-05, "loss": 0.513, "step": 252 },
    { "epoch": 0.7, "learning_rate": 4.654148368944449e-05, "loss": 0.5382, "step": 254 },
    { "epoch": 0.7, "learning_rate": 4.646346088175621e-05, "loss": 0.5376, "step": 256 },
    { "epoch": 0.71, "learning_rate": 4.638463470104139e-05, "loss": 0.5564, "step": 258 },
    { "epoch": 0.71, "learning_rate": 4.630500809774809e-05, "loss": 0.5468, "step": 260 },
    { "epoch": 0.72, "learning_rate": 4.6224584052284106e-05, "loss": 0.5559, "step": 262 },
    { "epoch": 0.72, "learning_rate": 4.614336557490526e-05, "loss": 0.5112, "step": 264 },
    { "epoch": 0.73, "learning_rate": 4.606135570560286e-05, "loss": 0.5295, "step": 266 },
    { "epoch": 0.74, "learning_rate": 4.5978557513989814e-05, "loss": 0.5489, "step": 268 },
    { "epoch": 0.74, "learning_rate": 4.5894974099185806e-05, "loss": 0.5569, "step": 270 },
    { "epoch": 0.75, "learning_rate": 4.581060858970124e-05, "loss": 0.5462, "step": 272 },
    { "epoch": 0.75, "learning_rate": 4.572546414332018e-05, "loss": 0.543, "step": 274 },
    { "epoch": 0.76, "learning_rate": 4.5639543946982144e-05, "loss": 0.5119, "step": 276 },
    { "epoch": 0.76, "learning_rate": 4.55528512166628e-05, "loss": 0.5203, "step": 278 },
    { "epoch": 0.77, "learning_rate": 4.546538919725364e-05, "loss": 0.5358, "step": 280 },
    { "epoch": 0.77, "learning_rate": 4.5377161162440445e-05, "loss": 0.5373, "step": 282 },
    { "epoch": 0.78, "learning_rate": 4.528817041458084e-05, "loss": 0.5436, "step": 284 },
    { "epoch": 0.79, "learning_rate": 4.519842028458062e-05, "loss": 0.5235, "step": 286 },
    { "epoch": 0.79, "learning_rate": 4.510791413176912e-05, "loss": 0.5087, "step": 288 },
    { "epoch": 0.8, "learning_rate": 4.501665534377345e-05, "loss": 0.5202, "step": 290 },
    { "epoch": 0.8, "learning_rate": 4.492464733639168e-05, "loss": 0.5374, "step": 292 },
    { "epoch": 0.81, "learning_rate": 4.483189355346506e-05, "loss": 0.5154, "step": 294 },
    { "epoch": 0.81, "learning_rate": 4.473839746674902e-05, "loss": 0.5269, "step": 296 },
    { "epoch": 0.82, "learning_rate": 4.46441625757833e-05, "loss": 0.561, "step": 298 },
    { "epoch": 0.82, "learning_rate": 4.454919240776093e-05, "loss": 0.5133, "step": 300 },
    { "epoch": 0.82, "eval_loss": 0.5352138876914978, "eval_runtime": 372.3236, "eval_samples_per_second": 26.359, "eval_steps_per_second": 0.207, "step": 300 },
    { "epoch": 0.83, "learning_rate": 4.4453490517396215e-05, "loss": 0.5321, "step": 302 },
    { "epoch": 0.83, "learning_rate": 4.435706048679166e-05, "loss": 0.5605, "step": 304 },
    { "epoch": 0.84, "learning_rate": 4.4259905925303935e-05, "loss": 0.5195, "step": 306 },
    { "epoch": 0.85, "learning_rate": 4.416203046940875e-05, "loss": 0.5167, "step": 308 },
    { "epoch": 0.85, "learning_rate": 4.4063437782564745e-05, "loss": 0.4983, "step": 310 },
    { "epoch": 0.86, "learning_rate": 4.396413155507637e-05, "loss": 0.5192, "step": 312 },
    { "epoch": 0.86, "learning_rate": 4.386411550395576e-05, "loss": 0.5082, "step": 314 },
    { "epoch": 0.87, "learning_rate": 4.37633933727836e-05, "loss": 0.5406, "step": 316 },
    { "epoch": 0.87, "learning_rate": 4.366196893156902e-05, "loss": 0.5123, "step": 318 },
    { "epoch": 0.88, "learning_rate": 4.355984597660846e-05, "loss": 0.5366, "step": 320 },
    { "epoch": 0.88, "learning_rate": 4.3457028330343606e-05, "loss": 0.5418, "step": 322 },
    { "epoch": 0.89, "learning_rate": 4.335351984121829e-05, "loss": 0.5353, "step": 324 },
    { "epoch": 0.89, "learning_rate": 4.324932438353446e-05, "loss": 0.5181, "step": 326 },
    { "epoch": 0.9, "learning_rate": 4.314444585730713e-05, "loss": 0.5291, "step": 328 },
    { "epoch": 0.91, "learning_rate": 4.3038888188118475e-05, "loss": 0.5377, "step": 330 },
    { "epoch": 0.91, "learning_rate": 4.293265532697084e-05, "loss": 0.5304, "step": 332 },
    { "epoch": 0.92, "learning_rate": 4.282575125013884e-05, "loss": 0.5121, "step": 334 },
    { "epoch": 0.92, "learning_rate": 4.271817995902062e-05, "loss": 0.5247, "step": 336 },
    { "epoch": 0.93, "learning_rate": 4.260994547998795e-05, "loss": 0.5106, "step": 338 },
    { "epoch": 0.93, "learning_rate": 4.2501051864235636e-05, "loss": 0.4972, "step": 340 },
    { "epoch": 0.94, "learning_rate": 4.2391503187629836e-05, "loss": 0.5521, "step": 342 },
    { "epoch": 0.94, "learning_rate": 4.228130355055548e-05, "loss": 0.5126, "step": 344 },
    { "epoch": 0.95, "learning_rate": 4.217045707776285e-05, "loss": 0.5321, "step": 346 },
    { "epoch": 0.96, "learning_rate": 4.2058967918213125e-05, "loss": 0.5347, "step": 348 },
    { "epoch": 0.96, "learning_rate": 4.194684024492315e-05, "loss": 0.5411, "step": 350 },
    { "epoch": 0.97, "learning_rate": 4.1834078254809194e-05, "loss": 0.522, "step": 352 },
    { "epoch": 0.97, "learning_rate": 4.172068616852988e-05, "loss": 0.5115, "step": 354 },
    { "epoch": 0.98, "learning_rate": 4.16066682303282e-05, "loss": 0.5396, "step": 356 },
    { "epoch": 0.98, "learning_rate": 4.149202870787269e-05, "loss": 0.5215, "step": 358 },
    { "epoch": 0.99, "learning_rate": 4.13767718920976e-05, "loss": 0.508, "step": 360 },
    { "epoch": 0.99, "learning_rate": 4.1260902097042385e-05, "loss": 0.531, "step": 362 },
    { "epoch": 1.0, "learning_rate": 4.114442365969019e-05, "loss": 0.5266, "step": 364 },
    { "epoch": 1.0, "learning_rate": 4.10273409398055e-05, "loss": 0.5073, "step": 366 },
    { "epoch": 1.01, "learning_rate": 4.090965831977101e-05, "loss": 0.5128, "step": 368 },
    { "epoch": 1.02, "learning_rate": 4.079138020442351e-05, "loss": 0.5457, "step": 370 },
    { "epoch": 1.02, "learning_rate": 4.0672511020889104e-05, "loss": 0.526, "step": 372 },
    { "epoch": 1.03, "learning_rate": 4.055305521841743e-05, "loss": 0.5231, "step": 374 },
    { "epoch": 1.03, "learning_rate": 4.043301726821515e-05, "loss": 0.5354, "step": 376 },
    { "epoch": 1.04, "learning_rate": 4.0312401663278616e-05, "loss": 0.5275, "step": 378 },
    { "epoch": 1.04, "learning_rate": 4.019121291822569e-05, "loss": 0.5066, "step": 380 },
    { "epoch": 1.05, "learning_rate": 4.006945556912673e-05, "loss": 0.5434, "step": 382 },
    { "epoch": 1.05, "learning_rate": 3.9947134173334846e-05, "loss": 0.4798, "step": 384 },
    { "epoch": 1.06, "learning_rate": 3.9824253309315286e-05, "loss": 0.5124, "step": 386 },
    { "epoch": 1.07, "learning_rate": 3.97008175764741e-05, "loss": 0.523, "step": 388 },
    { "epoch": 1.07, "learning_rate": 3.9576831594985956e-05, "loss": 0.4993, "step": 390 },
    { "epoch": 1.08, "learning_rate": 3.945230000562121e-05, "loss": 0.4933, "step": 392 },
    { "epoch": 1.08, "learning_rate": 3.932722746957223e-05, "loss": 0.4813, "step": 394 },
    { "epoch": 1.09, "learning_rate": 3.920161866827889e-05, "loss": 0.5043, "step": 396 },
    { "epoch": 1.09, "learning_rate": 3.9075478303253396e-05, "loss": 0.4889, "step": 398 },
    { "epoch": 1.1, "learning_rate": 3.8948811095904234e-05, "loss": 0.5248, "step": 400 },
    { "epoch": 1.1, "eval_loss": 0.5309346914291382, "eval_runtime": 372.3756, "eval_samples_per_second": 26.355, "eval_steps_per_second": 0.207, "step": 400 },
    { "epoch": 1.1, "learning_rate": 3.882162178735952e-05, "loss": 0.4794, "step": 402 },
    { "epoch": 1.11, "learning_rate": 3.869391513828951e-05, "loss": 0.5006, "step": 404 },
    { "epoch": 1.11, "learning_rate": 3.856569592872841e-05, "loss": 0.509, "step": 406 },
    { "epoch": 1.12, "learning_rate": 3.843696895789546e-05, "loss": 0.4963, "step": 408 },
    { "epoch": 1.13, "learning_rate": 3.83077390440153e-05, "loss": 0.4662, "step": 410 },
    { "epoch": 1.13, "learning_rate": 3.8178011024137636e-05, "loss": 0.46, "step": 412 },
    { "epoch": 1.14, "learning_rate": 3.804778975395618e-05, "loss": 0.4948, "step": 414 },
    { "epoch": 1.14, "learning_rate": 3.791708010762689e-05, "loss": 0.464, "step": 416 },
    { "epoch": 1.15, "learning_rate": 3.778588697758556e-05, "loss": 0.5009, "step": 418 },
    { "epoch": 1.15, "learning_rate": 3.7654215274364675e-05, "loss": 0.4557, "step": 420 },
    { "epoch": 1.16, "learning_rate": 3.752206992640962e-05, "loss": 0.4959, "step": 422 },
    { "epoch": 1.16, "learning_rate": 3.73894558798942e-05, "loss": 0.4421, "step": 424 },
    { "epoch": 1.17, "learning_rate": 3.7256378098535544e-05, "loss": 0.4353, "step": 426 },
    { "epoch": 1.18, "learning_rate": 3.712284156340824e-05, "loss": 0.4763, "step": 428 },
    { "epoch": 1.18, "learning_rate": 3.698885127275795e-05, "loss": 0.4794, "step": 430 },
    { "epoch": 1.19, "learning_rate": 3.685441224181434e-05, "loss": 0.4441, "step": 432 },
    { "epoch": 1.19, "learning_rate": 3.671952950260331e-05, "loss": 0.4641, "step": 434 },
    { "epoch": 1.2, "learning_rate": 3.658420810375866e-05, "loss": 0.4387, "step": 436 },
    { "epoch": 1.2, "learning_rate": 3.644845311033316e-05, "loss": 0.441, "step": 438 },
    { "epoch": 1.21, "learning_rate": 3.631226960360894e-05, "loss": 0.429, "step": 440 },
    { "epoch": 1.21, "learning_rate": 3.6175662680907265e-05, "loss": 0.4344, "step": 442 },
    { "epoch": 1.22, "learning_rate": 3.60386374553978e-05, "loss": 0.4528, "step": 444 },
    { "epoch": 1.22, "learning_rate": 3.5901199055907195e-05, "loss": 0.4319, "step": 446 },
    { "epoch": 1.23, "learning_rate": 3.576335262672711e-05, "loss": 0.45, "step": 448 },
    { "epoch": 1.24, "learning_rate": 3.5625103327421684e-05, "loss": 0.4411, "step": 450 },
    { "epoch": 1.24, "learning_rate": 3.54864563326344e-05, "loss": 0.4117, "step": 452 },
    { "epoch": 1.25, "learning_rate": 3.534741683189441e-05, "loss": 0.4585, "step": 454 },
    { "epoch": 1.25, "learning_rate": 3.5207990029422284e-05, "loss": 0.4313, "step": 456 },
    { "epoch": 1.26, "learning_rate": 3.50681811439352e-05, "loss": 0.4476, "step": 458 },
    { "epoch": 1.26, "learning_rate": 3.492799540845165e-05, "loss": 0.4558, "step": 460 },
    { "epoch": 1.27, "learning_rate": 3.478743807009552e-05, "loss": 0.4661, "step": 462 },
    { "epoch": 1.27, "learning_rate": 3.4646514389899755e-05, "loss": 0.4317, "step": 464 },
    { "epoch": 1.28, "learning_rate": 3.450522964260936e-05, "loss": 0.4353, "step": 466 },
    { "epoch": 1.28, "learning_rate": 3.436358911648403e-05, "loss": 0.4386, "step": 468 },
    { "epoch": 1.29, "learning_rate": 3.4221598113100195e-05, "loss": 0.4262, "step": 470 },
    { "epoch": 1.3, "learning_rate": 3.407926194715257e-05, "loss": 0.4343, "step": 472 },
    { "epoch": 1.3, "learning_rate": 3.393658594625523e-05, "loss": 0.4504, "step": 474 },
    { "epoch": 1.31, "learning_rate": 3.379357545074221e-05, "loss": 0.4502, "step": 476 },
    { "epoch": 1.31, "learning_rate": 3.365023581346762e-05, "loss": 0.4238, "step": 478 },
    { "epoch": 1.32, "learning_rate": 3.350657239960526e-05, "loss": 0.4433, "step": 480 },
    { "epoch": 1.32, "learning_rate": 3.3362590586447846e-05, "loss": 0.4298, "step": 482 },
    { "epoch": 1.33, "learning_rate": 3.3218295763205694e-05, "loss": 0.4424, "step": 484 },
    { "epoch": 1.33, "learning_rate": 3.307369333080504e-05, "loss": 0.4417, "step": 486 },
    { "epoch": 1.34, "learning_rate": 3.292878870168585e-05, "loss": 0.4561, "step": 488 },
    { "epoch": 1.35, "learning_rate": 3.278358729959929e-05, "loss": 0.4683, "step": 490 },
    { "epoch": 1.35, "learning_rate": 3.263809455940463e-05, "loss": 0.4342, "step": 492 },
    { "epoch": 1.36, "learning_rate": 3.24923159268659e-05, "loss": 0.4437, "step": 494 },
    { "epoch": 1.36, "learning_rate": 3.234625685844803e-05, "loss": 0.4343, "step": 496 },
    { "epoch": 1.37, "learning_rate": 3.219992282111256e-05, "loss": 0.4324, "step": 498 },
    { "epoch": 1.37, "learning_rate": 3.2053319292113114e-05, "loss": 0.4312, "step": 500 },
    { "epoch": 1.37, "eval_loss": 0.5447221398353577, "eval_runtime": 372.5869, "eval_samples_per_second": 26.34, "eval_steps_per_second": 0.207, "step": 500 },
    { "epoch": 1.38, "learning_rate": 3.190645175879032e-05, "loss": 0.437, "step": 502 },
    { "epoch": 1.38, "learning_rate": 3.1759325718366414e-05, "loss": 0.4252, "step": 504 },
    { "epoch": 1.39, "learning_rate": 3.1611946677739515e-05, "loss": 0.4599, "step": 506 },
    { "epoch": 1.39, "learning_rate": 3.146432015327751e-05, "loss": 0.4204, "step": 508 },
    { "epoch": 1.4, "learning_rate": 3.131645167061152e-05, "loss": 0.4276, "step": 510 },
    { "epoch": 1.41, "learning_rate": 3.1168346764429126e-05, "loss": 0.4286, "step": 512 },
    { "epoch": 1.41, "learning_rate": 3.10200109782672e-05, "loss": 0.4038, "step": 514 },
    { "epoch": 1.42, "learning_rate": 3.087144986430442e-05, "loss": 0.4278, "step": 516 },
    { "epoch": 1.42, "learning_rate": 3.0722668983153394e-05, "loss": 0.4254, "step": 518 },
    { "epoch": 1.43, "learning_rate": 3.0573673903652615e-05, "loss": 0.4241, "step": 520 },
    { "epoch": 1.43, "learning_rate": 3.042447020265795e-05, "loss": 0.4487, "step": 522 },
    { "epoch": 1.44, "learning_rate": 3.027506346483395e-05, "loss": 0.4359, "step": 524 },
    { "epoch": 1.44, "learning_rate": 3.0125459282444755e-05, "loss": 0.4468, "step": 526 },
    { "epoch": 1.45, "learning_rate": 2.997566325514487e-05, "loss": 0.4352, "step": 528 },
    { "epoch": 1.46, "learning_rate": 2.982568098976947e-05, "loss": 0.4306, "step": 530 },
    { "epoch": 1.46, "learning_rate": 2.967551810012461e-05, "loss": 0.4406, "step": 532 },
    { "epoch": 1.47, "learning_rate": 2.9525180206777058e-05, "loss": 0.4278, "step": 534 },
    { "epoch": 1.47, "learning_rate": 2.9374672936843937e-05, "loss": 0.4227, "step": 536 },
    { "epoch": 1.48, "learning_rate": 2.9224001923782134e-05, "loss": 0.4191, "step": 538 },
    { "epoch": 1.48, "learning_rate": 2.907317280717736e-05, "loss": 0.4219, "step": 540 },
    { "epoch": 1.49, "learning_rate": 2.8922191232533137e-05, "loss": 0.4483, "step": 542 },
    { "epoch": 1.49, "learning_rate": 2.8771062851059456e-05, "loss": 0.4192, "step": 544 },
    { "epoch": 1.5, "learning_rate": 2.861979331946126e-05, "loss": 0.4188, "step": 546 },
    { "epoch": 1.5, "learning_rate": 2.8468388299726712e-05, "loss": 0.4305, "step": 548 },
    { "epoch": 1.51, "learning_rate": 2.8316853458915256e-05, "loss": 0.4191, "step": 550 },
    { "epoch": 1.52, "learning_rate": 2.816519446894555e-05, "loss": 0.435, "step": 552 },
    { "epoch": 1.52, "learning_rate": 2.8013417006383076e-05, "loss": 0.4341, "step": 554 },
    { "epoch": 1.53, "learning_rate": 2.7861526752227767e-05, "loss": 0.4346, "step": 556 },
    { "epoch": 1.53, "learning_rate": 2.7709529391701305e-05, "loss": 0.4205, "step": 558 },
    { "epoch": 1.54, "learning_rate": 2.7557430614034336e-05, "loss": 0.4578, "step": 560 },
    { "epoch": 1.54, "learning_rate": 2.740523611225354e-05, "loss": 0.4287, "step": 562 },
    { "epoch": 1.55, "learning_rate": 2.7252951582968523e-05, "loss": 0.4184, "step": 564 },
    { "epoch": 1.55, "learning_rate": 2.7100582726158608e-05, "loss": 0.4243, "step": 566 },
    { "epoch": 1.56, "learning_rate": 2.694813524495947e-05, "loss": 0.4133, "step": 568 },
    { "epoch": 1.56, "learning_rate": 2.6795614845449714e-05, "loss": 0.4318, "step": 570 },
    { "epoch": 1.57, "learning_rate": 2.6643027236437212e-05, "loss": 0.419, "step": 572 },
    { "epoch": 1.58, "learning_rate": 2.6490378129245498e-05, "loss": 0.4369, "step": 574 },
    { "epoch": 1.58, "learning_rate": 2.6337673237499988e-05, "loss": 0.4497, "step": 576 },
    { "epoch": 1.59, "learning_rate": 2.618491827691407e-05, "loss": 0.4435, "step": 578 },
    { "epoch": 1.59, "learning_rate": 2.6032118965075225e-05, "loss": 0.4288, "step": 580 },
    { "epoch": 1.6, "learning_rate": 2.5879281021230972e-05, "loss": 0.4417, "step": 582 },
    { "epoch": 1.6, "learning_rate": 2.572641016607484e-05, "loss": 0.4521, "step": 584 },
    { "epoch": 1.61, "learning_rate": 2.5573512121532207e-05, "loss": 0.4259, "step": 586 },
    { "epoch": 1.61, "learning_rate": 2.542059261054613e-05, "loss": 0.457, "step": 588 },
    { "epoch": 1.62, "learning_rate": 2.5267657356863188e-05, "loss": 0.4348, "step": 590 },
    { "epoch": 1.63, "learning_rate": 2.511471208481918e-05, "loss": 0.4357, "step": 592 },
    { "epoch": 1.63, "learning_rate": 2.49617625191249e-05, "loss": 0.4301, "step": 594 },
    { "epoch": 1.64, "learning_rate": 2.4808814384651834e-05, "loss": 0.429, "step": 596 },
    { "epoch": 1.64, "learning_rate": 2.4655873406217928e-05, "loss": 0.4215, "step": 598 },
    { "epoch": 1.65, "learning_rate": 2.4502945308373246e-05, "loss": 0.4257, "step": 600 },
    { "epoch": 1.65, "eval_loss": 0.5429728627204895, "eval_runtime": 372.1195, "eval_samples_per_second": 26.373, "eval_steps_per_second": 0.207, "step": 600 },
    { "epoch": 1.65, "learning_rate": 2.435003581518577e-05, "loss": 0.4524, "step": 602 },
    { "epoch": 1.66, "learning_rate": 2.4197150650027086e-05, "loss": 0.4464, "step": 604 },
    { "epoch": 1.66, "learning_rate": 2.4044295535358195e-05, "loss": 0.4308, "step": 606 },
    { "epoch": 1.67, "learning_rate": 2.389147619251531e-05, "loss": 0.4212, "step": 608 },
    { "epoch": 1.67, "learning_rate": 2.3738698341495724e-05, "loss": 0.4358, "step": 610 },
    { "epoch": 1.68, "learning_rate": 2.358596770074369e-05, "loss": 0.4226, "step": 612 },
    { "epoch": 1.69, "learning_rate": 2.3433289986936398e-05, "loss": 0.4027, "step": 614 },
    { "epoch": 1.69, "learning_rate": 2.3280670914769972e-05, "loss": 0.4154, "step": 616 },
    { "epoch": 1.7, "learning_rate": 2.3128116196745605e-05, "loss": 0.438, "step": 618 },
    { "epoch": 1.7, "learning_rate": 2.297563154295575e-05, "loss": 0.4265, "step": 620 },
    { "epoch": 1.71, "learning_rate": 2.2823222660870337e-05, "loss": 0.4539, "step": 622 },
    { "epoch": 1.71, "learning_rate": 2.267089525512318e-05, "loss": 0.428, "step": 624 },
    { "epoch": 1.72, "learning_rate": 2.2518655027298464e-05, "loss": 0.4473, "step": 626 },
    { "epoch": 1.72, "learning_rate": 2.2366507675717314e-05, "loss": 0.4209, "step": 628 },
    { "epoch": 1.73, "learning_rate": 2.221445889522452e-05, "loss": 0.4155, "step": 630 },
    { "epoch": 1.74, "learning_rate": 2.2062514376975373e-05, "loss": 0.4486, "step": 632 },
    { "epoch": 1.74, "learning_rate": 2.191067980822266e-05, "loss": 0.4482, "step": 634 },
    { "epoch": 1.75, "learning_rate": 2.1758960872103733e-05, "loss": 0.4348, "step": 636 },
    { "epoch": 1.75, "learning_rate": 2.160736324742792e-05, "loss": 0.4422, "step": 638 },
    { "epoch": 1.76, "learning_rate": 2.1455892608463824e-05, "loss": 0.4081, "step": 640 },
    { "epoch": 1.76, "learning_rate": 2.1304554624727006e-05, "loss": 0.4191, "step": 642 },
    { "epoch": 1.77, "learning_rate": 2.1153354960767785e-05, "loss": 0.4343, "step": 644 },
    { "epoch": 1.77, "learning_rate": 2.1002299275959185e-05, "loss": 0.4211, "step": 646 },
    { "epoch": 1.78, "learning_rate": 2.085139322428514e-05, "loss": 0.4481, "step": 648 },
    { "epoch": 1.78, "learning_rate": 2.0700642454128815e-05, "loss": 0.4278, "step": 650 },
    { "epoch": 1.79, "learning_rate": 2.055005260806125e-05, "loss": 0.4126, "step": 652 },
    { "epoch": 1.8, "learning_rate": 2.0399629322630102e-05, "loss": 0.4152, "step": 654 },
    { "epoch": 1.8, "learning_rate": 2.024937822814871e-05, "loss": 0.4351, "step": 656 },
    { "epoch": 1.81, "learning_rate": 2.009930494848535e-05, "loss": 0.4102, "step": 658 },
    { "epoch": 1.81, "learning_rate": 1.994941510085271e-05, "loss": 0.4274, "step": 660 },
    { "epoch": 1.82, "learning_rate": 1.9799714295597657e-05, "loss": 0.4515, "step": 662 },
    { "epoch": 1.82, "learning_rate": 1.9650208135991227e-05, "loss": 0.4101, "step": 664 },
    { "epoch": 1.83, "learning_rate": 1.9500902218018946e-05, "loss": 0.4325, "step": 666 },
    { "epoch": 1.83, "learning_rate": 1.935180213017131e-05, "loss": 0.4496, "step": 668 },
    { "epoch": 1.84, "learning_rate": 1.9202913453234622e-05, "loss": 0.4326, "step": 670 },
    { "epoch": 1.84, "learning_rate": 1.9054241760082142e-05, "loss": 0.4157, "step": 672 },
    { "epoch": 1.85, "learning_rate": 1.8905792615465455e-05, "loss": 0.4022, "step": 674 },
    { "epoch": 1.86, "learning_rate": 1.8757571575806213e-05, "loss": 0.4185, "step": 676 },
    { "epoch": 1.86, "learning_rate": 1.8609584188988136e-05, "loss": 0.4238, "step": 678 },
    { "epoch": 1.87, "learning_rate": 1.8461835994149362e-05, "loss": 0.4265, "step": 680 },
    { "epoch": 1.87, "learning_rate": 1.8314332521475132e-05, "loss": 0.4182, "step": 682 },
    { "epoch": 1.88, "learning_rate": 1.816707929199077e-05, "loss": 0.4323, "step": 684 },
    { "epoch": 1.88, "learning_rate": 1.8020081817355066e-05, "loss": 0.4375, "step": 686 },
    { "epoch": 1.89, "learning_rate": 1.7873345599653946e-05, "loss": 0.4281, "step": 688 },
    { "epoch": 1.89, "learning_rate": 1.772687613119455e-05, "loss": 0.4274, "step": 690 },
    { "epoch": 1.9, "learning_rate": 1.7580678894299618e-05, "loss": 0.4267, "step": 692 },
    { "epoch": 1.91, "learning_rate": 1.7434759361102372e-05, "loss": 0.4402, "step": 694 },
    { "epoch": 1.91, "learning_rate": 1.7289122993341596e-05, "loss": 0.4347, "step": 696 },
    { "epoch": 1.92, "learning_rate": 1.714377524215725e-05, "loss": 0.4095, "step": 698 },
    { "epoch": 1.92, "learning_rate": 1.6998721547886465e-05, "loss": 0.4267, "step": 700 },
    { "epoch": 1.92, "eval_loss": 0.5412114262580872, "eval_runtime": 372.1815, "eval_samples_per_second": 26.369, "eval_steps_per_second": 0.207, "step": 700 },
    { "epoch": 1.93, "learning_rate": 1.6853967339859842e-05, "loss": 0.4078, "step": 702 },
    { "epoch": 1.93, "learning_rate": 1.6709518036198308e-05, "loss": 0.4204, "step": 704 },
    { "epoch": 1.94, "learning_rate": 1.656537904361026e-05, "loss": 0.4402, "step": 706 },
    { "epoch": 1.94, "learning_rate": 1.6421555757189205e-05, "loss": 0.4218, "step": 708 },
    { "epoch": 1.95, "learning_rate": 1.627805356021187e-05, "loss": 0.428, "step": 710 },
    { "epoch": 1.95, "learning_rate": 1.613487782393661e-05, "loss": 0.4337, "step": 712 },
    { "epoch": 1.96, "learning_rate": 1.5992033907402482e-05, "loss": 0.4443, "step": 714 },
    { "epoch": 1.97, "learning_rate": 1.5849527157228565e-05, "loss": 0.4197, "step": 716 },
    { "epoch": 1.97, "learning_rate": 1.5707362907413868e-05, "loss": 0.4259, "step": 718 },
    { "epoch": 1.98, "learning_rate": 1.5565546479137676e-05, "loss": 0.432, "step": 720 },
    { "epoch": 1.98, "learning_rate": 1.5424083180560418e-05, "loss": 0.4272, "step": 722 },
    { "epoch": 1.99, "learning_rate": 1.528297830662491e-05, "loss": 0.4076, "step": 724 },
    { "epoch": 1.99, "learning_rate": 1.5142237138858221e-05, "loss": 0.4387, "step": 726 },
    { "epoch": 2.0, "learning_rate": 1.5001864945173972e-05, "loss": 0.4271, "step": 728 },
    { "epoch": 2.0, "learning_rate": 1.4861866979675154e-05, "loss": 0.4142, "step": 730 },
    { "epoch": 2.01, "learning_rate": 1.4722248482457484e-05, "loss": 0.4221, "step": 732 },
    { "epoch": 2.02, "learning_rate": 1.4583014679413242e-05, "loss": 0.4456, "step": 734 },
    { "epoch": 2.02, "learning_rate": 1.4444170782035699e-05, "loss": 0.4366, "step": 736 },
    { "epoch": 2.03, "learning_rate": 1.4305721987224008e-05, "loss": 0.4324, "step": 738 },
    { "epoch": 2.03, "learning_rate": 1.4167673477088739e-05, "loss": 0.4263, "step": 740 },
    { "epoch": 2.04, "learning_rate": 1.4030030418757892e-05, "loss": 0.44, "step": 742 },
    { "epoch": 2.04, "learning_rate": 1.3892797964183449e-05, "loss": 0.4129, "step": 744 },
    { "epoch": 2.05, "learning_rate": 1.3755981249948625e-05, "loss": 0.4423, "step": 746 },
    { "epoch": 2.05, "learning_rate": 1.3619585397075505e-05, "loss": 0.3938, "step": 748 },
    { "epoch": 2.06, "learning_rate": 1.3483615510833463e-05, "loss": 0.4129, "step": 750 },
    { "epoch": 2.06, "learning_rate": 1.3348076680548021e-05, "loss": 0.4318, "step": 752 },
    { "epoch": 2.07, "learning_rate": 1.3212973979410338e-05, "loss": 0.3967, "step": 754 },
    { "epoch": 2.08, "learning_rate": 1.3078312464287353e-05, "loss": 0.4087, "step": 756 },
    { "epoch": 2.08, "learning_rate": 1.2944097175532522e-05, "loss": 0.3822, "step": 758 },
    { "epoch": 2.09, "learning_rate": 1.2810333136797134e-05, "loss": 0.3914, "step": 760 },
    { "epoch": 2.09, "learning_rate": 1.267702535484225e-05, "loss": 0.3947, "step": 762 },
    { "epoch": 2.1, "learning_rate": 1.2544178819351376e-05, "loss": 0.4057, "step": 764 },
    { "epoch": 2.1, "learning_rate": 1.241179850274361e-05, "loss": 0.3814, "step": 766 },
    { "epoch": 2.11, "learning_rate": 1.2279889359987604e-05, "loss": 0.3955, "step": 768 },
    { "epoch": 2.11, "learning_rate": 1.2148456328416068e-05, "loss": 0.3999, "step": 770 },
    { "epoch": 2.12, "learning_rate": 1.2017504327540935e-05, "loss": 0.3896, "step": 772 },
    { "epoch": 2.12, "learning_rate": 1.1887038258869295e-05, "loss": 0.3685, "step": 774 },
    { "epoch": 2.13, "learning_rate": 1.175706300571986e-05, "loss": 0.358, "step": 776 },
    { "epoch": 2.14, "learning_rate": 1.162758343304023e-05, "loss": 0.379, "step": 778 },
    { "epoch": 2.14, "learning_rate": 1.1498604387224798e-05, "loss": 0.3678, "step": 780 },
    { "epoch": 2.15, "learning_rate": 1.1370130695933318e-05, "loss": 0.3808, "step": 782 },
    { "epoch": 2.15, "learning_rate": 1.1242167167910216e-05, "loss": 0.3507, "step": 784 },
    { "epoch": 2.16, "learning_rate": 1.1114718592804637e-05, "loss": 0.3664, "step": 786 },
    { "epoch": 2.16, "learning_rate": 1.0987789740991143e-05, "loss": 0.3474, "step": 788 },
    { "epoch": 2.17, "learning_rate": 1.0861385363391117e-05, "loss": 0.3316, "step": 790 },
    { "epoch": 2.17, "learning_rate": 1.0735510191295025e-05, "loss": 0.3592, "step": 792 },
    { "epoch": 2.18, "learning_rate": 1.0610168936185245e-05, "loss": 0.3532, "step": 794 },
    { "epoch": 2.19, "learning_rate": 1.0485366289559765e-05, "loss": 0.3359, "step": 796 },
    { "epoch": 2.19, "learning_rate": 1.0361106922756574e-05, "loss": 0.3443, "step": 798 },
    { "epoch": 2.2, "learning_rate": 1.0237395486778775e-05, "loss": 0.3239, "step": 800 },
    { "epoch": 2.2, "eval_loss": 0.5829917788505554, "eval_runtime": 372.4526, "eval_samples_per_second": 26.35, "eval_steps_per_second": 0.207, "step": 800 },
    { "epoch": 2.2, "learning_rate": 1.011423661212057e-05, "loss": 0.3277, "step": 802 },
    { "epoch": 2.21, "learning_rate": 9.991634908593864e-06, "loss": 0.3181, "step": 804 },
    { "epoch": 2.21, "learning_rate": 9.869594965155784e-06, "loss": 0.3135, "step": 806 },
    { "epoch": 2.22, "learning_rate": 9.748121349736892e-06, "loss": 0.3394, "step": 808 },
    { "epoch": 2.22, "learning_rate": 9.627218609070189e-06, "loss": 0.3148, "step": 810 },
    { "epoch": 2.23, "learning_rate": 9.506891268520943e-06, "loss": 0.3326, "step": 812 },
    { "epoch": 2.23, "learning_rate": 9.387143831917336e-06, "loss": 0.3272, "step": 814 },
    { "epoch": 2.24, "learning_rate": 9.26798078138186e-06, "loss": 0.306, "step": 816 },
    { "epoch": 2.25, "learning_rate": 9.149406577163528e-06, "loss": 0.3267, "step": 818 },
    { "epoch": 2.25, "learning_rate": 9.031425657470981e-06, "loss": 0.3185, "step": 820 },
    { "epoch": 2.26, "learning_rate": 8.914042438306319e-06, "loss": 0.321, "step": 822 },
    { "epoch": 2.26, "learning_rate": 8.797261313299845e-06, "loss": 0.3303, "step": 824 },
    { "epoch": 2.27, "learning_rate": 8.681086653545606e-06, "loss": 0.3526, "step": 826 },
    { "epoch": 2.27, "learning_rate": 8.565522807437743e-06, "loss": 0.3265, "step": 828 },
    { "epoch": 2.28, "learning_rate": 8.450574100507807e-06, "loss": 0.3084, "step": 830 },
    { "epoch": 2.28, "learning_rate": 8.336244835262778e-06, "loss": 0.3264, "step": 832 },
    { "epoch": 2.29, "learning_rate": 8.222539291024078e-06, "loss": 0.3155, "step": 834 },
    { "epoch": 2.3, "learning_rate": 8.109461723767384e-06, "loss": 0.3138, "step": 836 },
    { "epoch": 2.3, "learning_rate": 7.9970163659633e-06, "loss": 0.3248, "step": 838 },
    { "epoch": 2.31, "learning_rate": 7.885207426418959e-06, "loss": 0.3359, "step": 840 },
    { "epoch": 2.31, "learning_rate": 7.7740390901205e-06, "loss": 0.3113, "step": 842 },
    { "epoch": 2.32, "learning_rate": 7.663515518076416e-06, "loss": 0.3231, "step": 844 },
    { "epoch": 2.32, "learning_rate": 7.5536408471617744e-06, "loss": 0.3195, "step": 846 },
    { "epoch": 2.33, "learning_rate": 7.444419189963442e-06, "loss": 0.324, "step": 848 },
    { "epoch": 2.33, "learning_rate": 7.335854634626074e-06, "loss": 0.323, "step": 850 },
    { "epoch": 2.34, "learning_rate": 7.227951244699166e-06, "loss": 0.3383, "step": 852 },
    { "epoch": 2.34, "learning_rate": 7.120713058984918e-06, "loss": 0.3475, "step": 854 },
    { "epoch": 2.35, "learning_rate": 7.014144091387054e-06, "loss": 0.3262, "step": 856 },
    { "epoch": 2.36, "learning_rate": 6.9082483307606245e-06, "loss": 0.3239, "step": 858 },
    { "epoch": 2.36, "learning_rate": 6.803029740762648e-06, "loss": 0.3207, "step": 860 },
    { "epoch": 2.37, "learning_rate": 6.698492259703807e-06, "loss": 0.3224, "step": 862 },
    { "epoch": 2.37, "learning_rate": 6.5946398004010115e-06, "loss": 0.3119, "step": 864 },
    { "epoch": 2.38, "learning_rate": 6.491476250030934e-06, "loss": 0.3178, "step": 866 },
    { "epoch": 2.38, "learning_rate": 6.389005469984519e-06, "loss": 0.3105, "step": 868 },
    { "epoch": 2.39, "learning_rate": 6.28723129572247e-06, "loss": 0.3417, "step": 870 },
    { "epoch": 2.39, "learning_rate": 6.1861575366316895e-06, "loss": 0.3191, "step": 872 },
    { "epoch": 2.4, "learning_rate": 6.08578797588264e-06, "loss": 0.3116, "step": 874 },
    { "epoch": 2.4, "learning_rate": 5.986126370287826e-06, "loss": 0.3223, "step": 876 },
    { "epoch": 2.41, "learning_rate": 5.887176450161097e-06, "loss": 0.2942, "step": 878 },
    { "epoch": 2.42, "learning_rate": 5.788941919178078e-06, "loss": 0.3197, "step": 880 },
    { "epoch": 2.42, "learning_rate": 5.691426454237531e-06, "loss": 0.313, "step": 882 },
    { "epoch": 2.43, "learning_rate": 5.594633705323687e-06, "loss": 0.3172, "step": 884 },
    { "epoch": 2.43, "learning_rate": 5.4985672953697e-06, "loss": 0.3444, "step": 886 },
    { "epoch": 2.44, "learning_rate": 5.403230820121971e-06, "loss": 0.3197, "step": 888 },
    { "epoch": 2.44, "learning_rate": 5.308627848005618e-06, "loss": 0.3405, "step": 890 },
    { "epoch": 2.45, "learning_rate": 5.214761919990857e-06, "loss": 0.3252, "step": 892 },
    { "epoch": 2.45, "learning_rate": 5.121636549460523e-06, "loss": 0.3173, "step": 894 },
    { "epoch": 2.46, "learning_rate": 5.0292552220784995e-06, "loss": 0.3329, "step": 896 },
    { "epoch": 2.47, "learning_rate": 4.9376213956593116e-06, "loss": 0.3148, "step": 898 },
    { "epoch": 2.47, "learning_rate": 4.846738500038667e-06, "loss": 0.3171, "step": 900 },
    { "epoch": 2.47, "eval_loss": 0.594931423664093, "eval_runtime": 372.4124, "eval_samples_per_second": 26.353, "eval_steps_per_second": 0.207, "step": 900 },
    { "epoch": 2.48, "learning_rate": 4.756609936945069e-06, "loss": 0.3116, "step": 902 },
    { "epoch": 2.48, "learning_rate": 4.667239079872532e-06, "loss": 0.3202, "step": 904 },
    { "epoch": 2.49, "learning_rate": 4.578629273954263e-06, "loss": 0.3431, "step": 906 },
    { "epoch": 2.49, "learning_rate": 4.490783835837479e-06, "loss": 0.3089, "step": 908 },
    { "epoch": 2.5, "learning_rate": 4.40370605355929e-06, "loss": 0.323, "step": 910 },
    { "epoch": 2.5, "learning_rate": 4.317399186423574e-06, "loss": 0.3153, "step": 912 },
    { "epoch": 2.51, "learning_rate": 4.231866464879014e-06, "loss": 0.318, "step": 914 },
    { "epoch": 2.51, "learning_rate": 4.147111090398193e-06, "loss": 0.3284, "step": 916 },
    { "epoch": 2.52, "learning_rate": 4.063136235357745e-06, "loss": 0.3277, "step": 918 },
    { "epoch": 2.53, "learning_rate": 3.979945042919603e-06, "loss": 0.3316, "step": 920 },
    { "epoch": 2.53, "learning_rate": 3.897540626913393e-06, "loss": 0.3272, "step": 922 },
    { "epoch": 2.54, "learning_rate": 3.815926071719828e-06, "loss": 0.3392, "step": 924 },
    { "epoch": 2.54, "learning_rate": 3.735104432155309e-06, "loss": 0.3284, "step": 926 },
    { "epoch": 2.55, "learning_rate": 3.655078733357567e-06, "loss": 0.3141, "step": 928 },
    { "epoch": 2.55, "learning_rate": 3.5758519706724086e-06, "loss": 0.3233, "step": 930 },
    { "epoch": 2.56, "learning_rate": 3.497427109541651e-06, "loss": 0.3226, "step": 932 },
    { "epoch": 2.56, "learning_rate": 3.4198070853920768e-06, "loss": 0.3259, "step": 934 },
    { "epoch": 2.57, "learning_rate": 3.3429948035255733e-06, "loss": 0.3292, "step": 936 },
    { "epoch": 2.58, "learning_rate": 3.266993139010438e-06, "loss": 0.3285, "step": 938 },
    { "epoch": 2.58, "learning_rate": 3.191804936573681e-06, "loss": 0.3301, "step": 940 },
    { "epoch": 2.59, "learning_rate": 3.1174330104946055e-06, "loss": 0.3446, "step": 942 },
    { "epoch": 2.59, "learning_rate": 3.0438801444994587e-06,
| "loss": 0.3361, | |
| "step": 944 | |
| }, | |
| { | |
| "epoch": 2.6, | |
| "learning_rate": 2.9711490916572354e-06, | |
| "loss": 0.3387, | |
| "step": 946 | |
| }, | |
| { | |
| "epoch": 2.6, | |
| "learning_rate": 2.8992425742766145e-06, | |
| "loss": 0.3555, | |
| "step": 948 | |
| }, | |
| { | |
| "epoch": 2.61, | |
| "learning_rate": 2.828163283804097e-06, | |
| "loss": 0.328, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 2.61, | |
| "learning_rate": 2.7579138807232283e-06, | |
| "loss": 0.3501, | |
| "step": 952 | |
| }, | |
| { | |
| "epoch": 2.62, | |
| "learning_rate": 2.6884969944550533e-06, | |
| "loss": 0.343, | |
| "step": 954 | |
| }, | |
| { | |
| "epoch": 2.62, | |
| "learning_rate": 2.6199152232596753e-06, | |
| "loss": 0.3288, | |
| "step": 956 | |
| }, | |
| { | |
| "epoch": 2.63, | |
| "learning_rate": 2.552171134138992e-06, | |
| "loss": 0.3377, | |
| "step": 958 | |
| }, | |
| { | |
| "epoch": 2.64, | |
| "learning_rate": 2.4852672627406564e-06, | |
| "loss": 0.3251, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 2.64, | |
| "learning_rate": 2.41920611326312e-06, | |
| "loss": 0.3353, | |
| "step": 962 | |
| }, | |
| { | |
| "epoch": 2.65, | |
| "learning_rate": 2.3539901583619185e-06, | |
| "loss": 0.3315, | |
| "step": 964 | |
| }, | |
| { | |
| "epoch": 2.65, | |
| "learning_rate": 2.2896218390571546e-06, | |
| "loss": 0.3475, | |
| "step": 966 | |
| }, | |
| { | |
| "epoch": 2.66, | |
| "learning_rate": 2.2261035646420764e-06, | |
| "loss": 0.3494, | |
| "step": 968 | |
| }, | |
| { | |
| "epoch": 2.66, | |
| "learning_rate": 2.1634377125929166e-06, | |
| "loss": 0.3437, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 2.67, | |
| "learning_rate": 2.10162662847993e-06, | |
| "loss": 0.3336, | |
| "step": 972 | |
| }, | |
| { | |
| "epoch": 2.67, | |
| "learning_rate": 2.040672625879575e-06, | |
| "loss": 0.3407, | |
| "step": 974 | |
| }, | |
| { | |
| "epoch": 2.68, | |
| "learning_rate": 1.980577986287907e-06, | |
| "loss": 0.336, | |
| "step": 976 | |
| }, | |
| { | |
| "epoch": 2.68, | |
| "learning_rate": 1.921344959035218e-06, | |
| "loss": 0.3192, | |
| "step": 978 | |
| }, | |
| { | |
| "epoch": 2.69, | |
| "learning_rate": 1.8629757612017961e-06, | |
| "loss": 0.3173, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 2.7, | |
| "learning_rate": 1.8054725775349973e-06, | |
| "loss": 0.3434, | |
| "step": 982 | |
| }, | |
| { | |
| "epoch": 2.7, | |
| "learning_rate": 1.748837560367425e-06, | |
| "loss": 0.3383, | |
| "step": 984 | |
| }, | |
| { | |
| "epoch": 2.71, | |
| "learning_rate": 1.693072829536385e-06, | |
| "loss": 0.3597, | |
| "step": 986 | |
| }, | |
| { | |
| "epoch": 2.71, | |
| "learning_rate": 1.6381804723045485e-06, | |
| "loss": 0.342, | |
| "step": 988 | |
| }, | |
| { | |
| "epoch": 2.72, | |
| "learning_rate": 1.5841625432818057e-06, | |
| "loss": 0.3521, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 2.72, | |
| "learning_rate": 1.5310210643483813e-06, | |
| "loss": 0.3277, | |
| "step": 992 | |
| }, | |
| { | |
| "epoch": 2.73, | |
| "learning_rate": 1.4787580245791631e-06, | |
| "loss": 0.3322, | |
| "step": 994 | |
| }, | |
| { | |
| "epoch": 2.73, | |
| "learning_rate": 1.427375380169213e-06, | |
| "loss": 0.3552, | |
| "step": 996 | |
| }, | |
| { | |
| "epoch": 2.74, | |
| "learning_rate": 1.3768750543605851e-06, | |
| "loss": 0.3639, | |
| "step": 998 | |
| }, | |
| { | |
| "epoch": 2.75, | |
| "learning_rate": 1.3272589373703236e-06, | |
| "loss": 0.3437, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 2.75, | |
| "eval_loss": 0.5839625000953674, | |
| "eval_runtime": 372.3615, | |
| "eval_samples_per_second": 26.356, | |
| "eval_steps_per_second": 0.207, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 2.75, | |
| "learning_rate": 1.2785288863197186e-06, | |
| "loss": 0.3565, | |
| "step": 1002 | |
| }, | |
| { | |
| "epoch": 2.76, | |
| "learning_rate": 1.2306867251647813e-06, | |
| "loss": 0.3187, | |
| "step": 1004 | |
| }, | |
| { | |
| "epoch": 2.76, | |
| "learning_rate": 1.1837342446279903e-06, | |
| "loss": 0.3312, | |
| "step": 1006 | |
| }, | |
| { | |
| "epoch": 2.77, | |
| "learning_rate": 1.1376732021312508e-06, | |
| "loss": 0.3494, | |
| "step": 1008 | |
| }, | |
| { | |
| "epoch": 2.77, | |
| "learning_rate": 1.0925053217301278e-06, | |
| "loss": 0.3399, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 2.78, | |
| "learning_rate": 1.048232294049309e-06, | |
| "loss": 0.3484, | |
| "step": 1012 | |
| }, | |
| { | |
| "epoch": 2.78, | |
| "learning_rate": 1.004855776219313e-06, | |
| "loss": 0.3534, | |
| "step": 1014 | |
| }, | |
| { | |
| "epoch": 2.79, | |
| "learning_rate": 9.623773918144897e-07, | |
| "loss": 0.3353, | |
| "step": 1016 | |
| }, | |
| { | |
| "epoch": 2.79, | |
| "learning_rate": 9.2079873079223e-07, | |
| "loss": 0.3362, | |
| "step": 1018 | |
| }, | |
| { | |
| "epoch": 2.8, | |
| "learning_rate": 8.801213494334464e-07, | |
| "loss": 0.3509, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 2.81, | |
| "learning_rate": 8.403467702843643e-07, | |
| "loss": 0.3282, | |
| "step": 1022 | |
| }, | |
| { | |
| "epoch": 2.81, | |
| "learning_rate": 8.014764820994807e-07, | |
| "loss": 0.3454, | |
| "step": 1024 | |
| }, | |
| { | |
| "epoch": 2.82, | |
| "learning_rate": 7.635119397858603e-07, | |
| "loss": 0.357, | |
| "step": 1026 | |
| }, | |
| { | |
| "epoch": 2.82, | |
| "learning_rate": 7.264545643486997e-07, | |
| "loss": 0.339, | |
| "step": 1028 | |
| }, | |
| { | |
| "epoch": 2.83, | |
| "learning_rate": 6.903057428381127e-07, | |
| "loss": 0.3577, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 2.83, | |
| "learning_rate": 6.550668282972155e-07, | |
| "loss": 0.3627, | |
| "step": 1032 | |
| }, | |
| { | |
| "epoch": 2.84, | |
| "learning_rate": 6.207391397115042e-07, | |
| "loss": 0.3554, | |
| "step": 1034 | |
| }, | |
| { | |
| "epoch": 2.84, | |
| "learning_rate": 5.8732396195946e-07, | |
| "loss": 0.3415, | |
| "step": 1036 | |
| }, | |
| { | |
| "epoch": 2.85, | |
| "learning_rate": 5.548225457644662e-07, | |
| "loss": 0.3328, | |
| "step": 1038 | |
| }, | |
| { | |
| "epoch": 2.86, | |
| "learning_rate": 5.232361076480035e-07, | |
| "loss": 0.3398, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 2.86, | |
| "learning_rate": 4.925658298840979e-07, | |
| "loss": 0.351, | |
| "step": 1042 | |
| }, | |
| { | |
| "epoch": 2.87, | |
| "learning_rate": 4.628128604550808e-07, | |
| "loss": 0.3402, | |
| "step": 1044 | |
| }, | |
| { | |
| "epoch": 2.87, | |
| "learning_rate": 4.3397831300862057e-07, | |
| "loss": 0.3467, | |
| "step": 1046 | |
| }, | |
| { | |
| "epoch": 2.88, | |
| "learning_rate": 4.060632668160286e-07, | |
| "loss": 0.3579, | |
| "step": 1048 | |
| }, | |
| { | |
| "epoch": 2.88, | |
| "learning_rate": 3.790687667318743e-07, | |
| "loss": 0.3643, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 2.89, | |
| "learning_rate": 3.5299582315487525e-07, | |
| "loss": 0.3507, | |
| "step": 1052 | |
| }, | |
| { | |
| "epoch": 2.89, | |
| "learning_rate": 3.278454119900576e-07, | |
| "loss": 0.349, | |
| "step": 1054 | |
| }, | |
| { | |
| "epoch": 2.9, | |
| "learning_rate": 3.0361847461226335e-07, | |
| "loss": 0.3555, | |
| "step": 1056 | |
| }, | |
| { | |
| "epoch": 2.9, | |
| "learning_rate": 2.8031591783088953e-07, | |
| "loss": 0.3669, | |
| "step": 1058 | |
| }, | |
| { | |
| "epoch": 2.91, | |
| "learning_rate": 2.579386138559514e-07, | |
| "loss": 0.3605, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 2.92, | |
| "learning_rate": 2.3648740026543658e-07, | |
| "loss": 0.3432, | |
| "step": 1062 | |
| }, | |
| { | |
| "epoch": 2.92, | |
| "learning_rate": 2.1596307997396037e-07, | |
| "loss": 0.3622, | |
| "step": 1064 | |
| }, | |
| { | |
| "epoch": 2.93, | |
| "learning_rate": 1.963664212027011e-07, | |
| "loss": 0.3309, | |
| "step": 1066 | |
| }, | |
| { | |
| "epoch": 2.93, | |
| "learning_rate": 1.7769815745066475e-07, | |
| "loss": 0.3553, | |
| "step": 1068 | |
| }, | |
| { | |
| "epoch": 2.94, | |
| "learning_rate": 1.5995898746720695e-07, | |
| "loss": 0.3621, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 2.94, | |
| "learning_rate": 1.4314957522589546e-07, | |
| "loss": 0.3567, | |
| "step": 1072 | |
| }, | |
| { | |
| "epoch": 2.95, | |
| "learning_rate": 1.2727054989965236e-07, | |
| "loss": 0.37, | |
| "step": 1074 | |
| }, | |
| { | |
| "epoch": 2.95, | |
| "learning_rate": 1.1232250583720072e-07, | |
| "loss": 0.3657, | |
| "step": 1076 | |
| }, | |
| { | |
| "epoch": 2.96, | |
| "learning_rate": 9.830600254082944e-08, | |
| "loss": 0.3695, | |
| "step": 1078 | |
| }, | |
| { | |
| "epoch": 2.96, | |
| "learning_rate": 8.522156464543518e-08, | |
| "loss": 0.353, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 2.97, | |
| "learning_rate": 7.306968189890184e-08, | |
| "loss": 0.3591, | |
| "step": 1082 | |
| }, | |
| { | |
| "epoch": 2.98, | |
| "learning_rate": 6.185080914375974e-08, | |
| "loss": 0.3629, | |
| "step": 1084 | |
| }, | |
| { | |
| "epoch": 2.98, | |
| "learning_rate": 5.156536630015474e-08, | |
| "loss": 0.3568, | |
| "step": 1086 | |
| }, | |
| { | |
| "epoch": 2.99, | |
| "learning_rate": 4.221373835014975e-08, | |
| "loss": 0.359, | |
| "step": 1088 | |
| }, | |
| { | |
| "epoch": 2.99, | |
| "learning_rate": 3.379627532329732e-08, | |
| "loss": 0.3634, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "learning_rate": 2.631329228355017e-08, | |
| "loss": 0.359, | |
| "step": 1092 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "step": 1092, | |
| "total_flos": 1201247503384576.0, | |
| "train_loss": 0.44882795285610927, | |
| "train_runtime": 126278.8523, | |
| "train_samples_per_second": 4.43, | |
| "train_steps_per_second": 0.009 | |
| } | |
| ], | |
| "logging_steps": 2, | |
| "max_steps": 1092, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 1000, | |
| "total_flos": 1201247503384576.0, | |
| "train_batch_size": 16, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
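
The state above appears to be the standard `trainer_state.json` that the Hugging Face `Trainer` writes alongside its checkpoints: `log_history` holds the per-step training entries (with `loss`), the periodic eval entries (with `eval_loss`), and a final summary entry carrying the `train_runtime` fields. Below is a minimal sketch for inspecting such a file, assuming it is saved under its default name `trainer_state.json`; the filename, the optional matplotlib block, and the derived global-batch figure are all assumptions or estimates, not part of the log itself.

```python
import json

# Assumption: the state above is available as a plain JSON file under
# the Trainer's default name, trainer_state.json.
with open("trainer_state.json") as f:
    state = json.load(f)

log = state["log_history"]

# Training entries carry "loss", eval entries carry "eval_loss";
# the final summary entry carries train_runtime and related fields.
train_log = [e for e in log if "loss" in e]
eval_log = [e for e in log if "eval_loss" in e]
summary = log[-1]

print(f"train points logged : {len(train_log)}")
print(f"evals logged        : {len(eval_log)}")
print(f"final train loss    : {train_log[-1]['loss']:.4f}")
for e in eval_log:
    print(f"eval @ step {e['step']:>4}: {e['eval_loss']:.4f}")

# Estimate the effective global batch size from the summary fields:
# total samples processed (samples/sec * runtime) divided by the
# number of optimizer steps. This is an estimate recovered from the
# logged values, not a field the Trainer records directly.
if "train_samples_per_second" in summary and "train_runtime" in summary:
    samples = summary["train_samples_per_second"] * summary["train_runtime"]
    print(f"approx. global batch: {samples / state['max_steps']:.0f} samples/step")

# Optional: plot train loss against the two eval points, if
# matplotlib happens to be installed.
try:
    import matplotlib.pyplot as plt

    plt.plot([e["step"] for e in train_log],
             [e["loss"] for e in train_log], label="train loss")
    plt.scatter([e["step"] for e in eval_log],
                [e["eval_loss"] for e in eval_log],
                color="red", label="eval loss")
    plt.xlabel("step")
    plt.ylabel("loss")
    plt.legend()
    plt.savefig("loss_curve.png")
except ImportError:
    pass
```

Against this state, the script reports the two logged eval points (0.5949 at step 900, 0.5840 at step 1000) and an estimated global batch of roughly 512 samples per step (4.43 samples/s × 126278.85 s ÷ 1092 steps); with the recorded per-device `train_batch_size` of 16, that would suggest a device-count × gradient-accumulation product of about 32, though the logged rates are rounded, so treat the figure as approximate.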